diff --git a/arctic/serialization/numpy_arrays.py b/arctic/serialization/numpy_arrays.py
index 711c9571..2844493e 100644
--- a/arctic/serialization/numpy_arrays.py
+++ b/arctic/serialization/numpy_arrays.py
@@ -116,7 +116,11 @@ def docify(self, df):
                 dtypes[str(c)] = arr.dtype.str
                 if mask is not None:
                     masks[str(c)] = Binary(compress(mask.tostring()))
-                arrays.append(arr.tostring())
+                # Pickle instead of tostring(): tostring() cannot round-trip
+                # object-dtype columns.  NOTE(review): pickled payloads are only
+                # safe to load back from a trusted store (see objify).
+                import pickle
+                arrays.append(pickle.dumps(arr))
             except Exception as e:
                 typ = infer_dtype(df[c], skipna=False)
                 msg = "Column '{}' type is {}".format(str(c), typ)
@@ -154,7 +158,13 @@ def objify(self, doc, columns=None):
             else:
                 d = decompress(doc[DATA][doc[METADATA][LENGTHS][col][0]: doc[METADATA][LENGTHS][col][1] + 1])
                 # d is ready-only but that's not an issue since DataFrame will copy the data anyway.
-            d = np.frombuffer(d, doc[METADATA][DTYPE][col])
+            # New documents store pickled ndarrays; fall back to the legacy
+            # raw-buffer layout for documents written before this change.
+            import pickle
+            try:
+                d = pickle.loads(d)
+            except Exception:  # not a pickle -> legacy raw buffer
+                d = np.frombuffer(d, doc[METADATA][DTYPE][col])
 
             if MASK in doc[METADATA] and col in doc[METADATA][MASK]:
                 mask_data = decompress(doc[METADATA][MASK][col])
diff --git a/arctic/store/_pandas_ndarray_store.py b/arctic/store/_pandas_ndarray_store.py
index 6903bfee..14ce8558 100644
--- a/arctic/store/_pandas_ndarray_store.py
+++ b/arctic/store/_pandas_ndarray_store.py
@@ -217,7 +217,8 @@ class PandasPanelStore(PandasDataFrameStore):
 
     @staticmethod
     def can_write_type(data):
-        return isinstance(data, Panel)
+        # Panel was removed from pandas; this store can no longer accept writes.
+        return False
 
     def can_write(self, version, symbol, data):
         if self.can_write_type(data):
diff --git a/setup.py b/setup.py
index a31553f9..c60e489a 100644
--- a/setup.py
+++ b/setup.py
@@ -77,7 +77,7 @@ def run_tests(self):
         "enum-compat",
         "futures; python_version == '2.7'",
         "mockextras",
-        "pandas<=1.0.3",
+        "pandas",
         "pymongo>=3.6.0",
         "python-dateutil",
         "pytz",