Hi,
When I run featuretools.save_features(feature_def, f): with a default dataset it works fine, but with a feature_def generated from my own data I get the TypeError traceback pasted at the bottom of this issue.
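Roughly what both paths look like on my side (a simplified sketch, not my exact notebook: the mock-customer demo stands in for the "default dataset", and my_df / the column names are placeholders for my own data; the real list I save, transformed_features, comes out of a bigger EntitySet plus some extra transform steps):

```python
import featuretools as ft
import pandas as pd
from woodwork.logical_types import Categorical

# Path 1: the default demo dataset -- save_features works fine here.
es_demo = ft.demo.load_mock_customer(return_entityset=True)
fm_demo, feature_defs = ft.dfs(entityset=es_demo, target_dataframe_name="customers")
with open("features_demo.json", "w+") as f:
    ft.save_features(feature_defs, f)

# Path 2: my own data -- in my real notebook the same calls end in the
# TypeError below. This tiny placeholder frame is only meant to show the
# shape of the code, not to reproduce the error on its own.
my_df = pd.DataFrame({"id": [1, 2, 3], "product_id": ["a", "b", "a"]})  # placeholder
es = ft.EntitySet(id="my_data")
es = es.add_dataframe(
    dataframe_name="transactions",
    dataframe=my_df,
    index="id",
    logical_types={"product_id": Categorical},
)
fm_1, feature_defs_1 = ft.dfs(entityset=es, target_dataframe_name="transactions")
with open("features_mine.json", "w+") as f:
    ft.save_features(feature_defs_1, f)
```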
With a GPT prompt, I also checked the features exported from the default dataset, and that export contains the same kind of Categorical entries, e.g.:
"metadata": {"dataframe_name": "transactions", "entityset_id": "transactions"}}, {"name": "product_id", "ordinal": 3, "use_standard_tags": true, "logical_type": {"parameters": {}, "type": "Categorical"},
So I still don't have a clue.
Any input is appreciated, thanks! The full traceback:
TypeError Traceback (most recent call last)
Cell In[107], line 8
6 print(type(feature_defs_1),type(transformed_features),feature_defs_1[:5],transformed_features[:5])
7 with open(filepath,'w+') as f:
----> 8 ft.save_features(transformed_features,f)
File /opt/conda/lib/python3.10/site-packages/featuretools/feature_base/features_serializer.py:62, in save_features(features, location, profile_name)
10 def save_features(features, location=None, profile_name=None):
11 """Saves the features list as JSON to a specified filepath/S3 path, writes to an open file, or
12 returns the serialized features as a JSON string. If no file provided, returns a string.
13
(...)
60 :func:`.load_features`
61 """
---> 62 return FeaturesSerializer(features).save(location, profile_name=profile_name)
File /opt/conda/lib/python3.10/site-packages/featuretools/feature_base/features_serializer.py:104, in FeaturesSerializer.save(self, location, profile_name)
102 json.dump(features_dict, f)
103 else:
--> 104 json.dump(features_dict, location)
File /opt/conda/lib/python3.10/json/__init__.py:179, in dump(obj, fp, skipkeys, ensure_ascii, check_circular, allow_nan, cls, indent, separators, default, sort_keys, **kw)
173 iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
174 check_circular=check_circular, allow_nan=allow_nan, indent=indent,
175 separators=separators,
176 default=default, sort_keys=sort_keys, **kw).iterencode(obj)
177 # could accelerate with writelines in some versions of Python, at
178 # a debuggability cost
--> 179 for chunk in iterable:
180 fp.write(chunk)
File /opt/conda/lib/python3.10/json/encoder.py:431, in _make_iterencode.<locals>._iterencode(o, _current_indent_level)
429 yield from _iterencode_list(o, _current_indent_level)
430 elif isinstance(o, dict):
--> 431 yield from _iterencode_dict(o, _current_indent_level)
432 else:
433 if markers is not None:
File /opt/conda/lib/python3.10/json/encoder.py:405, in _make_iterencode.<locals>._iterencode_dict(dct, _current_indent_level)
403 else:
404 chunks = _iterencode(value, _current_indent_level)
--> 405 yield from chunks
406 if newline_indent is not None:
407 _current_indent_level -= 1
File /opt/conda/lib/python3.10/json/encoder.py:405, in _make_iterencode.<locals>._iterencode_dict(dct, _current_indent_level)
403 else:
404 chunks = _iterencode(value, _current_indent_level)
--> 405 yield from chunks
406 if newline_indent is not None:
407 _current_indent_level -= 1
[... skipping similar frames: _make_iterencode.<locals>._iterencode_dict at line 405 (1 times)]
File /opt/conda/lib/python3.10/json/encoder.py:405, in _make_iterencode.<locals>._iterencode_dict(dct, _current_indent_level)
403 else:
404 chunks = _iterencode(value, _current_indent_level)
--> 405 yield from chunks
406 if newline_indent is not None:
407 _current_indent_level -= 1
File /opt/conda/lib/python3.10/json/encoder.py:325, in _make_iterencode.<locals>._iterencode_list(lst, _current_indent_level)
323 else:
324 chunks = _iterencode(value, _current_indent_level)
--> 325 yield from chunks
326 if newline_indent is not None:
327 _current_indent_level -= 1
File /opt/conda/lib/python3.10/json/encoder.py:405, in _make_iterencode.<locals>._iterencode_dict(dct, _current_indent_level)
403 else:
404 chunks = _iterencode(value, _current_indent_level)
--> 405 yield from chunks
406 if newline_indent is not None:
407 _current_indent_level -= 1
File /opt/conda/lib/python3.10/json/encoder.py:405, in _make_iterencode.<locals>._iterencode_dict(dct, _current_indent_level)
403 else:
404 chunks = _iterencode(value, _current_indent_level)
--> 405 yield from chunks
406 if newline_indent is not None:
407 _current_indent_level -= 1
File /opt/conda/lib/python3.10/json/encoder.py:438, in _make_iterencode.<locals>._iterencode(o, _current_indent_level)
436 raise ValueError("Circular reference detected")
437 markers[markerid] = o
--> 438 o = _default(o)
439 yield from _iterencode(o, _current_indent_level)
440 if markers is not None:
File /opt/conda/lib/python3.10/json/encoder.py:179, in JSONEncoder.default(self, o)
160 def default(self, o):
161 """Implement this method in a subclass such that it returns
162 a serializable object for o, or calls the base implementation
163 (to raise a TypeError).
(...)
177
178 """
--> 179 raise TypeError(f'Object of type {o.__class__.__name__} '
180 f'is not JSON serializable')
TypeError: Object of type Categorical is not JSON serializable
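In case it helps with debugging, this is the kind of check I can run to compare the Woodwork logical types on my own EntitySet with the demo one (a sketch: es / es_demo are from the snippet above, "transactions" is my dataframe name, and the demo EntitySet happens to have a dataframe with the same name):

```python
# Compare how the column logical types look on my dataframe vs. the demo's.
# The error message names "Categorical", so I print the actual Python type of
# each logical-type object in case something other than a plain Woodwork
# LogicalType is ending up in the dict that save_features tries to json.dump.
for name, ltype in es["transactions"].ww.logical_types.items():
    print("mine:", name, repr(ltype), type(ltype))

for name, ltype in es_demo["transactions"].ww.logical_types.items():
    print("demo:", name, repr(ltype), type(ltype))
```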