Merge pull request #154 from Sparks29032/explicit_paths
Fix tools diffpy.utils paths
sbillinge authored Nov 12, 2024
2 parents 8595c1f + 7ab7e6f commit c7b1f60
Showing 2 changed files with 27 additions and 5 deletions.
23 changes: 23 additions & 0 deletions news/explicit_paths.rst
@@ -0,0 +1,23 @@
**Added:**

* <news item>

**Changed:**

* Paths to diffpy.utils.parsers functions made explicit to the file level.

**Deprecated:**

* <news item>

**Removed:**

* <news item>

**Fixed:**

* <news item>

**Security:**

* <news item>
9 changes: 4 additions & 5 deletions src/diffpy/pdfmorph/tools.py
@@ -116,9 +116,8 @@ def readPDF(fname):
     r,gr
         Arrays read from data.
     """
-    from diffpy.utils.parsers import loadData
 
-    rv = loadData(fname, unpack=True)
+    rv = parsers.loaddata.loadData(fname, unpack=True)
     if len(rv) >= 2:
         return rv[:2]
     return (None, None)
@@ -146,7 +145,7 @@ def deserialize(serial_file):
     dict
         Data read from serial file.
     """
-    return parsers.deserialize_data(serial_file)
+    return parsers.serialization.deserialize_data(serial_file)
 
 
 def case_insensitive_dictionary_search(key: str, dictionary: dict):
@@ -200,11 +199,11 @@ def field_sort(filepaths: list, field, reverse=False, serfile=None, get_field_va
     files_field_values = []
     if serfile is None:
         for path in filepaths:
-            fhd = parsers.loadData(path, headers=True)
+            fhd = parsers.loaddata.loadData(path, headers=True)
             files_field_values.append([path, case_insensitive_dictionary_search(field, fhd)])
     else:
         # deserialize the serial file
-        des_dict = parsers.deserialize_data(serfile)
+        des_dict = parsers.serialization.deserialize_data(serfile)
 
         # get names of each file to search the serial file
         import pathlib
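
For reference, a minimal standalone sketch (not part of the commit) of the explicit file-level diffpy.utils.parsers call paths that tools.py now uses. It assumes a recent diffpy.utils release where loadData is defined in diffpy/utils/parsers/loaddata.py and deserialize_data in diffpy/utils/parsers/serialization.py; the input file names below are hypothetical.

# Sketch of the explicit file-level paths, assuming the diffpy.utils
# layout described above.  Importing the submodules directly guarantees
# they are available as attributes of the parsers package regardless of
# what its __init__ re-exports.
import diffpy.utils.parsers.loaddata        # provides loadData
import diffpy.utils.parsers.serialization   # provides deserialize_data
from diffpy.utils import parsers

# "sample.gr" and "sample.json" are hypothetical input files.
rv = parsers.loaddata.loadData("sample.gr", unpack=True)
r, gr = rv[0], rv[1]
metadata = parsers.serialization.deserialize_data("sample.json")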