Commit d2cac65: restructure package

skasberger committed Apr 14, 2021
1 parent 51b63f9
Showing 32 changed files with 177 additions and 61 deletions.
10 changes: 3 additions & 7 deletions utils/__main__.py → src/dvtests/__main__.py
@@ -2,10 +2,7 @@
 from typing import List
 
 import typer
-from config import Config
-
 from utils import collect_data
-from utils import config
 from utils import create_testdata
 from utils import generate_data
 from utils import INSTANCE_DATA_DIR
@@ -41,13 +38,12 @@ def create_testdata_command(config_file: str, force: bool = False) -> None:
 
 @app.command("remove-testdata")
 def remove_testdata_command(
-    config_file: str = None,
-    parent: str = None,
+    parent: str,
+    parent_data_type: str = "dataverse",
     data_types: List[str] = ["dataverses", "datasets"],
-    ds_published: bool = False,
     force: bool = False,
 ) -> None:
-    remove_testdata(config_file, parent, data_types, ds_published, force)
+    remove_testdata(parent, parent_data_type, data_types, force)
     typer.echo(f"Testdata removed")
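The remove-testdata command no longer reads its targets from a workflow config file: parent becomes a required positional argument, parent_data_type replaces the dropped ds_published flag, and the arguments are passed straight through to remove_testdata(). A minimal sketch of exercising the new signature with typer's test runner; the dvtests.__main__ import path and the "science" alias are illustrative assumptions, not taken from the commit.

from typer.testing import CliRunner

from dvtests.__main__ import app  # assumed import path after the restructure

runner = CliRunner()
# parent is now required; --force overrides the PRODUCTION guard in remove_testdata().
result = runner.invoke(app, ["remove-testdata", "science", "--force"])
print(result.output)  # expected to end with "Testdata removed"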
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
103 changes: 49 additions & 54 deletions utils/utils.py → src/dvtests/utils/__init__.py
@@ -8,7 +8,6 @@
 from time import sleep
 from typing import List
 
-from config import Config
 from pyDataverse.api import NativeApi
 from pyDataverse.models import Datafile
 from pyDataverse.models import Dataset
@@ -18,6 +17,8 @@
 from pyDataverse.utils import read_json
 from pyDataverse.utils import write_json
 
+from .config import Config
+
 
 if os.getenv("ENV_FILE"):
     config = Config(_env_file=os.getenv("ENV_FILE"))
@@ -29,7 +30,9 @@
 )
 if not os.path.isdir(INSTANCE_DATA_DIR):
     os.makedirs(INSTANCE_DATA_DIR)
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+ROOT_DIR = os.path.dirname(
+    os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
+)
 
 
 def collect_data(
@@ -86,96 +89,88 @@ def create_testdata(config_file: str, force: bool) -> None:
     for dv_conf in workflow["dataverses"]:
         dv_alias = None
         if "create" in dv_conf:
-            if dv_conf["create"]:
-                dv = Dataverse()
-                dv_filename = os.path.join(ROOT_DIR, dv_conf["filename"])
-                dv.from_json(read_file(dv_filename))
-                if "update" in dv_conf:
-                    for key, val in dv_conf["update"].items():
-                        kwargs = {key: val}
-                        dv.set(kwargs)
-                dv_alias = dv.get()["alias"]
-                resp = api.create_dataverse(dv_conf["parent"], dv.json())
+            dv = Dataverse()
+            dv_filename = os.path.join(ROOT_DIR, dv_conf["create"]["metadata-filename"])
+            dv.from_json(read_file(dv_filename))
+            if "update" in dv_conf["create"]:
+                for key, val in dv_conf["create"]["update"].items():
+                    kwargs = {key: val}
+                    dv.set(kwargs)
+            dv_alias = dv.get()["alias"]
+            resp = api.create_dataverse(dv_conf["create"]["parent"], dv.json())
 
         if "publish" in dv_conf:
-            if dv_conf["publish"]:
-                if not dv_alias and "alias" in dv_conf:
-                    dv_alias = dv_conf["alias"]
-                resp = api.publish_dataverse(dv_alias)
+            if not dv_alias and "alias" in dv_conf["publish"]:
+                dv_alias = dv_conf["publish"]["alias"]
+            resp = api.publish_dataverse(dv_alias)
 
     # Datasets
     for ds_conf in workflow["datasets"]:
         pid = None
         if "create" in ds_conf:
-            if ds_conf["create"]:
-                ds = Dataset()
-                ds_filename = os.path.join(ROOT_DIR, ds_conf["filename"])
-                ds.from_json(read_file(ds_filename))
-                if "update" in ds_conf:
-                    for key, val in ds_conf["update"].items():
-                        kwargs = {key: val}
-                        ds.set(kwargs)
-                resp = api.create_dataset(dv_alias, ds.json())
-                pid = resp.json()["data"]["persistentId"]
-                pid_idx.append(pid)
+            ds = Dataset()
+            ds_filename = os.path.join(ROOT_DIR, ds_conf["create"]["metadata-filename"])
+            ds.from_json(read_file(ds_filename))
+            if "update" in ds_conf["create"]:
+                for key, val in ds_conf["create"]["update"].items():
+                    kwargs = {key: val}
+                    ds.set(kwargs)
+            resp = api.create_dataset(dv_alias, ds.json())
+            pid = resp.json()["data"]["persistentId"]
+            pid_idx.append(pid)
 
         if "publish" in ds_conf:
-            if ds_conf["publish"]:
-                if not pid:
-                    print("ERROR: PID missing!")
-                    sys.exit()
-                resp = api.publish_dataset(pid, release_type="major")
+            if not pid:
+                print("ERROR: PID missing!")
+                sys.exit()
+            resp = api.publish_dataset(pid, release_type="major")
 
     # Datafiles
-    for df_conf in workflow["datafiles"]:
-        if "create" in df_conf:
-            if df_conf["create"]:
-                metadata = read_json(df_conf["metadata-filename"])
+    for dataset_id, ds_datafiles in workflow["datafiles"].items():
+        if int(dataset_id) == workflow["datasets"][int(dataset_id)]["id"]:
+            pid = pid_idx[int(dataset_id)]
+        else:
+            print("ERROR: Dataset ID not matching.")
+            sys.exit()
+        for df_conf in ds_datafiles:
+            if "upload" in df_conf:
+                metadata = read_json(df_conf["upload"]["metadata-filename"])
                 df = Datafile()
                 df.set(metadata)
-                if "update" in df_conf:
-                    for key, val in df_conf["update"].items():
+                if "update" in df_conf["upload"]:
+                    for key, val in df_conf["upload"]["update"].items():
                         kwargs = {key: val}
                         df.set(kwargs)
-                pid = pid_idx[df_conf["parent"]]
                 df.set({"pid": pid})
-                filename = df_conf["filename"]
+                filename = df_conf["upload"]["filename"]
                 resp = api.upload_datafile(pid, filename, df.json())
                 if filename[-4:] == ".sav" or filename[-4:] == ".dta":
                     sleep(30)
                 else:
                     sleep(3)
-        if "publish-dataset" in df_conf:
-            if df_conf["publish-dataset"]:
-                resp = api.publish_dataset(pid, release_type="major")
+            if "publish-dataset" in df_conf:
+                if df_conf["publish-dataset"]:
+                    resp = api.publish_dataset(pid, release_type="major")
 
 
 def remove_testdata(
-    config_file: str = None,
-    parent: str = None,
+    parent: str,
+    parent_data_type: str = "dataverse",
     data_types: List[str] = ["dataverses", "datasets"],
-    ds_published: bool = False,
     force: bool = False,
 ) -> None:
     if config.PRODUCTION and not force:
         print(
             "Delete testdata on a PRODUCTION instance not allowed. Use --force to force it."
         )
         sys.exit()
-    workflow = read_json(os.path.join(ROOT_DIR, config_file))
-    if "parent" in workflow:
-        parent = workflow["parent"]
-    if "data-types" in workflow:
-        data_types = workflow["data-types"]
-    if "datasets-published" in workflow:
-        datasets_published = workflow["datasets-published"]
 
     api = NativeApi(config.BASE_URL, config.API_TOKEN)
 
     # Clean up
     data = api.get_children(parent, children_types=data_types)
     dataverses, datasets, datafiles = dataverse_tree_walker(data)
-    if "parent-data-type" in workflow:
+    if parent_data_type == "dataverse":
         dataverses.append({"dataverse_alias": parent})
 
     for ds in datasets:
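The ROOT_DIR change above is pure path arithmetic: utils/utils.py sat two directory levels below the repository checkout, while src/dvtests/utils/__init__.py sits four levels below it, so two more os.path.dirname() calls are needed to land on the same root. A small sketch of the resolution; "/repo" is a made-up checkout location used only for illustration.

import os

path = "/repo/src/dvtests/utils/__init__.py"  # stands in for os.path.realpath(__file__)
for _ in range(4):
    path = os.path.dirname(path)  # utils -> dvtests -> src -> /repo
print(path)  # "/repo", where paths like dataverse_testdata/... are resolved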
File renamed without changes.
86 changes: 86 additions & 0 deletions src/dvtests/utils/configs/create_testdata_01.json
@@ -0,0 +1,86 @@
{
    "dataverses": [
        {
            "create": {
                "metadata-filename": "dataverse_testdata/metadata/json/dataverse/dataverse_upload_full_01.json",
                "parent": ":root",
                "update": {
                    "alias": "test_create_testdata",
                    "name": "Test Create Testdata"
                }
            },
            "publish": {}
        },
        {
            "create": {
                "metadata-filename": "dataverse_testdata/metadata/json/dataverse/dataverse_upload_full_01.json",
                "parent": "test_create_testdata"
            },
            "publish": {}
        }
    ],
    "datasets": [
        {
            "id": 0,
            "create": {
                "metadata-filename": "dataverse_testdata/metadata/json/dataset/dataset_upload_default_full_01.json",
                "parent": "test_create_testdata"
            },
            "publish": {}
        },
        {
            "id": 1,
            "create": {
                "metadata-filename": "dataverse_testdata/metadata/json/dataset/dataset_upload_default_min_01.json",
                "parent": "DataverseName1"
            },
            "publish": {}
        }
    ],
    "datafiles": {
        "0": [
            {
                "upload": {
                    "metadata-filename": "dataverse_testdata/metadata/json/datafile/datafile_upload_full_01.json",
                    "filename": "dataverse_testdata/files/jpeg/10000_image.jpeg",
                    "update": {
                        "filename": "10000_image.jpeg"
                    }
                },
                "publish-dataset": {}
            },
            {
                "upload": {
                    "metadata-filename": "dataverse_testdata/metadata/json/datafile/datafile_upload_min_01.json",
                    "filename": "dataverse_testdata/files/png/10000_image.png",
                    "update": {
                        "filename": "10000_image.png"
                    }
                },
                "publish-dataset": {}
            }
        ],
        "1": [
            {
                "upload": {
                    "metadata-filename": "dataverse_testdata/metadata/json/datafile/datafile_upload_min_01.json",
                    "filename": "dataverse_testdata/files/do/10002_data_checks.do",
                    "update": {
                        "filename": "10002_data_checks.do"
                    }
                },
                "publish-dataset": {}
            },
            {
                "upload": {
                    "metadata-filename": "dataverse_testdata/metadata/json/datafile/datafile_upload_full_01.json",
                    "filename": "dataverse_testdata/files/dta/10002_da_de_v0_9.dta",
                    "update": {
                        "filename": "10002_da_de_v0_9.dta"
                    }
                },
                "publish-dataset": {}
            }
        ]
    }
}
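In this config schema, the keys of "datafiles" are stringified indexes that must equal the "id" of the dataset at the same position in "datasets"; create_testdata() uses that index to pick the PID it recorded in pid_idx when the dataset was created, as the dataset-ID check in the diff above shows. A toy traversal of the file, with fabricated PIDs standing in for the ones the API would return:

import json

with open("create_testdata_01.json") as f:  # illustrative local copy of the config
    workflow = json.load(f)

pid_idx = ["doi:10.5072/FK2/EXAMPLE0", "doi:10.5072/FK2/EXAMPLE1"]  # fake PIDs

for dataset_id, ds_datafiles in workflow["datafiles"].items():
    # Mirrors the guard in create_testdata(): the key must match the dataset's "id".
    assert int(dataset_id) == workflow["datasets"][int(dataset_id)]["id"]
    pid = pid_idx[int(dataset_id)]
    for df_conf in ds_datafiles:
        print(pid, df_conf["upload"]["filename"])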
39 changes: 39 additions & 0 deletions src/dvtests/utils/configs/create_testdata_02.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
{
    "dataverses": [
        {
            "create": {
                "metadata-filename": "dataverse_testdata/metadata/json/dataverse/dataverse_upload_full_01.json",
                "parent": ":root",
                "update": {
                    "alias": "test_create_testdata",
                    "name": "Test Create Testdata"
                }
            },
            "publish": {}
        }
    ],
    "datasets": [
        {
            "id": 0,
            "create": {
                "metadata-filename": "dataverse_testdata/metadata/json/dataset/dataset_upload_default_full_01.json",
                "parent": "test_create_testdata"
            },
            "publish": {}
        }
    ],
    "datafiles": {
        "0": [
            {
                "upload": {
                    "metadata-filename": "dataverse_testdata/metadata/json/datafile/datafile_upload_full_01.json",
                    "filename": "dataverse_testdata/files/jpeg/10000_image.jpeg",
                    "update": {
                        "filename": "10000_image.jpeg"
                    }
                },
                "publish-dataset": {}
            }
        ]
    }
}
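A config this minimal is enough to drive the whole pipeline: one dataverse, one dataset, one datafile upload. A hypothetical direct call, assuming the package is importable as dvtests and that create_testdata() resolves the config path against the repository root; the hunk that reads the workflow file is not shown in this diff, so the relative path here is an assumption.

from dvtests.utils import create_testdata

create_testdata("src/dvtests/utils/configs/create_testdata_02.json", force=False)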
Empty file removed: utils/__init__.py
