Skip to content

Commit

Permalink
working pretty well
Browse files Browse the repository at this point in the history
  • Loading branch information
pdurbin committed Oct 25, 2024
1 parent 9dcdbce commit aa24a7a
Show file tree
Hide file tree
Showing 12 changed files with 228 additions and 51 deletions.
4 changes: 4 additions & 0 deletions linktest.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/bin/bash -x
# Smoke test for dataset-type <-> metadata-block linking.
# Requires: curl, jq, and a running Dataverse instance.
# The target server defaults to localhost but can be overridden:
#   BASE_URL=http://demo.example.org ./linktest.sh
BASE_URL="${BASE_URL:-http://localhost:8080}"
# Link the "geospatial" and "astrophysics" blocks to the default "dataset" type.
curl -s "$BASE_URL/api/datasets/datasetTypes/dataset" -X PUT -d '["geospatial","astrophysics"]' | jq .
# List all dataset types; the links created above should appear.
curl -s "$BASE_URL/api/datasets/datasetTypes" | jq .
# Confirm the linked blocks are included among blocks displayed on create.
curl -s "$BASE_URL/api/dataverses/root/metadatablocks?onlyDisplayedOnCreate=true&datasetType=dataset" | jq .
2 changes: 2 additions & 0 deletions showtable.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
#!/bin/sh
# Dump the datasettype_metadatablock join table from the dockerized Postgres,
# to inspect the dataset-type <-> metadata-block links at the database level.
# The container name defaults to "postgres-1" but can be overridden:
#   CONTAINER=dataverse-postgres-1 ./showtable.sh
CONTAINER="${CONTAINER:-postgres-1}"
# DOCKER_CLI_HINTS=false suppresses the "What's next" noise from the docker CLI.
DOCKER_CLI_HINTS=false docker exec -it "$CONTAINER" bash -c "psql -h localhost -U dataverse dataverse -c 'select * from datasettype_metadatablock'"
21 changes: 12 additions & 9 deletions src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import jakarta.persistence.OrderBy;
import jakarta.persistence.Table;
import jakarta.persistence.Transient;
import java.util.ArrayList;

/**
*
Expand Down Expand Up @@ -57,8 +58,9 @@ public class MetadataBlock implements Serializable, Comparable {
/**
* The dataset types this metadata block is associated with.
*/
@ManyToMany(mappedBy = "metadataBlocks")
private List<DatasetType> datasetTypes;
// @ManyToMany(mappedBy = "metadataBlocks", cascade = {CascadeType.MERGE})
// @ManyToMany(cascade = {CascadeType.MERGE})
// private List<DatasetType> datasetTypes = new ArrayList<>();

public Long getId() {
return id;
Expand All @@ -74,13 +76,14 @@ public void setName(String name) {
this.name = name;
}

public List<DatasetType> getDatasetTypes() {
return datasetTypes;
}

public void setDatasetTypes(List<DatasetType> datasetTypes) {
this.datasetTypes = datasetTypes;
}
// public List<DatasetType> getDatasetTypes() {
// return datasetTypes;
// }
//
// public void setDatasetTypes(List<DatasetType> datasetTypes) {
// // should this be ArrayList? this.metadataBlocks = new ArrayList<>(metadataBlocks);
// this.datasetTypes = datasetTypes;
// }

public String getNamespaceUri() {
return namespaceUri;
Expand Down
55 changes: 55 additions & 0 deletions src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
Original file line number Diff line number Diff line change
Expand Up @@ -5108,12 +5108,18 @@ public Response resetPidGenerator(@Context ContainerRequestContext crc, @PathPar
@GET
@Path("datasetTypes")
public Response getDatasetTypes() {
System.out.println("got here");
JsonArrayBuilder jab = Json.createArrayBuilder();
List<DatasetType> datasetTypes = datasetTypeSvc.listAll();
for (DatasetType datasetType : datasetTypes) {
JsonObjectBuilder job = Json.createObjectBuilder();
job.add("id", datasetType.getId());
job.add("name", datasetType.getName());
JsonArrayBuilder linkedMetadataBlocks = Json.createArrayBuilder();
for (MetadataBlock metadataBlock : datasetType.getMetadataBlocks()) {
linkedMetadataBlocks.add(metadataBlock.getName());
}
job.add("linkedMetadataBlocks", linkedMetadataBlocks);
jab.add(job);
}
return ok(jab.build());
Expand Down Expand Up @@ -5231,4 +5237,53 @@ public Response deleteDatasetType(@Context ContainerRequestContext crc, @PathPar
}
}

@AuthRequired
@PUT
@Path("datasetTypes/{idOrName}")
/**
 * Replaces the set of metadata blocks linked to a dataset type.
 *
 * @param idOrName numeric database id or name of the dataset type.
 * @param jsonBody a JSON array of metadata block names; an empty or absent
 *                 body clears all links.
 * @return 200 with the "before" and "after" lists of linked block names,
 *         404 if the dataset type cannot be found, or 400 if any named
 *         metadata block does not exist.
 */
public Response updateDatasetTypeLinksWithMetadataBlocks(@Context ContainerRequestContext crc, @PathParam("idOrName") String idOrName, String jsonBody) {
    // Resolve the dataset type either by numeric id or by name.
    DatasetType datasetType = null;
    if (StringUtils.isNumeric(idOrName)) {
        try {
            long id = Long.parseLong(idOrName);
            datasetType = datasetTypeSvc.getById(id);
        } catch (NumberFormatException ex) {
            // isNumeric passed but the value overflows long.
            return error(NOT_FOUND, "Could not find a dataset type with id " + idOrName);
        }
    } else {
        datasetType = datasetTypeSvc.getByName(idOrName);
    }
    // Guard against an unknown id/name; without this the loop below throws NPE.
    if (datasetType == null) {
        return error(NOT_FOUND, "Could not find a dataset type with id or name " + idOrName);
    }
    // Record the currently linked blocks so the response can show before/after.
    JsonArrayBuilder datasetTypesBefore = Json.createArrayBuilder();
    for (MetadataBlock metadataBlock : datasetType.getMetadataBlocks()) {
        datasetTypesBefore.add(metadataBlock.getName());
    }
    JsonArrayBuilder datasetTypesAfter = Json.createArrayBuilder();
    List<MetadataBlock> metadataBlocksToSave = new ArrayList<>();
    if (jsonBody != null && !jsonBody.isEmpty()) {
        JsonArray json = JsonUtil.getJsonArray(jsonBody);
        for (JsonString jsonValue : json.getValuesAs(JsonString.class)) {
            String name = jsonValue.getString();
            MetadataBlock metadataBlock = metadataBlockSvc.findByName(name);
            if (metadataBlock != null) {
                metadataBlocksToSave.add(metadataBlock);
                datasetTypesAfter.add(name);
            } else {
                // Fail the whole request on the first unknown block, listing the valid ones.
                String availableBlocks = metadataBlockSvc.listMetadataBlocks().stream().map(MetadataBlock::getName).collect(Collectors.joining(", "));
                return badRequest("Metadata block not found: " + name + ". Available metadata blocks: " + availableBlocks);
            }
        }
    }
    try {
        execCommand(new UpdateDatasetTypeLinksWithMetadataBlocks(createDataverseRequest(getRequestUser(crc)), datasetType, metadataBlocksToSave));
        return ok(Json.createObjectBuilder()
                .add("linkedMetadataBlocks", Json.createObjectBuilder()
                        .add("before", datasetTypesBefore)
                        .add("after", datasetTypesAfter))
        );
    } catch (WrappedResponse ex) {
        return ex.getResponse();
    }
}

}
32 changes: 17 additions & 15 deletions src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java
Original file line number Diff line number Diff line change
Expand Up @@ -64,28 +64,30 @@ public Response updateAssociationsWithDatasetTypes(@Context ContainerRequestCont
if (metadataBlock == null) {
return notFound("Can't find metadata block '" + idtf + "'");
}
List<DatasetType> datasetTypesExisting = metadataBlock.getDatasetTypes();
// List<DatasetType> datasetTypesExisting = metadataBlock.getDatasetTypes();
JsonArrayBuilder datasetTypesBefore = Json.createArrayBuilder();
for (DatasetType datasetType : datasetTypesExisting) {
datasetTypesBefore.add(datasetType.getName());
}
// for (DatasetType datasetType : datasetTypesExisting) {
// datasetTypesBefore.add(datasetType.getName());
// }
List<DatasetType> datasetTypesToSave = new ArrayList<>();
JsonArray json = JsonUtil.getJsonArray(jsonBody);
for (JsonString jsonValue : json.getValuesAs(JsonString.class)) {
String typeName = jsonValue.getString();
System.out.println("typename: " + typeName);
DatasetType datasetType = datasetTypeSvc.getByName(typeName);
datasetTypesToSave.add(datasetType);
if (jsonBody != null && !jsonBody.isEmpty()) {
JsonArray json = JsonUtil.getJsonArray(jsonBody);
for (JsonString jsonValue : json.getValuesAs(JsonString.class)) {
String typeName = jsonValue.getString();
System.out.println("typename: " + typeName);
DatasetType datasetType = datasetTypeSvc.getByName(typeName);
datasetTypesToSave.add(datasetType);
}
}
try {
MetadataBlock saved = execCommand(new UpdateMetadataBlockDatasetTypeAssociations(createDataverseRequest(getRequestUser(crc)), metadataBlock, datasetTypesToSave));
// Move this to command
List<DatasetType> savedMdb = saved.getDatasetTypes();
// List<DatasetType> savedMdb = saved.getDatasetTypes();
JsonArrayBuilder datasetTypesAfter = Json.createArrayBuilder();
for (DatasetType savedDatasetType : savedMdb) {
System.out.println("found one: " + savedDatasetType.getName());
datasetTypesAfter.add(savedDatasetType.getName());
}
// for (DatasetType savedDatasetType : savedMdb) {
// System.out.println("found one: " + savedDatasetType.getName());
// datasetTypesAfter.add(savedDatasetType.getName());
// }
return ok(Json.createObjectBuilder()
.add("associatedDatasetTypes", Json.createObjectBuilder()
.add("before", datasetTypesBefore)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import edu.harvard.iq.dataverse.MetadataBlock;
import jakarta.json.Json;
import jakarta.json.JsonObjectBuilder;
import jakarta.persistence.CascadeType;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
Expand All @@ -14,6 +15,7 @@
import jakarta.persistence.Table;
import jakarta.persistence.UniqueConstraint;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

@NamedQueries({
Expand Down Expand Up @@ -48,8 +50,8 @@ public class DatasetType implements Serializable {
/**
* The metadata blocks this dataset type is associated with.
*/
@ManyToMany()
private List<MetadataBlock> metadataBlocks;
@ManyToMany(cascade = {CascadeType.MERGE})
private List<MetadataBlock> metadataBlocks = new ArrayList<>();

public DatasetType() {
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,18 +36,21 @@ public ListMetadataBlocksCommand(DataverseRequest request, Dataverse dataverse,

@Override
public List<MetadataBlock> execute(CommandContext ctxt) throws CommandException {
    // Removed leftover System.out.println debug output.
    // When requested, narrow the result to blocks displayed on the dataset create form.
    if (onlyDisplayedOnCreate) {
        return listMetadataBlocksDisplayedOnCreate(ctxt, dataverse);
    }
    // Otherwise return all blocks configured on the collection.
    return dataverse.getMetadataBlocks();
}

private List<MetadataBlock> listMetadataBlocksDisplayedOnCreate(CommandContext ctxt, Dataverse dataverse) {
if (dataverse.isMetadataBlockRoot() || dataverse.getOwner() == null) {
return ctxt.metadataBlocks().listMetadataBlocksDisplayedOnCreate(dataverse);
}
System.out.println("got to listMetadataBlocksDisplayedOnCreate...");
// TODO keep all this isMetadataBlockRoot and is root collection business?
// if (dataverse.isMetadataBlockRoot() || dataverse.getOwner() == null) {
// return ctxt.metadataBlocks().listMetadataBlocksDisplayedOnCreate(dataverse);
// }
// return listMetadataBlocksDisplayedOnCreate(ctxt, dataverse.getOwner());
List<MetadataBlock> metadataBlocks = listMetadataBlocksDisplayedOnCreate(ctxt, dataverse.getOwner());
List<MetadataBlock> metadataBlocks = ctxt.metadataBlocks().listMetadataBlocksDisplayedOnCreate(dataverse);
if (datasetType == null) {
System.out.println("no dataset type, returning normal list");
return metadataBlocks;
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
package edu.harvard.iq.dataverse.engine.command.impl;

import edu.harvard.iq.dataverse.DvObject;
import edu.harvard.iq.dataverse.MetadataBlock;
import edu.harvard.iq.dataverse.dataset.DatasetType;
import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import java.util.List;

/**
 * Replaces the full set of metadata blocks linked to a dataset type and
 * merges the change into the persistence context.
 *
 * Inspired by UpdateDataverseMetadataBlocksCommand.
 */
// NOTE(review): no permissions are required here even though the caller's API
// endpoint is @AuthRequired — confirm whether a superuser check belongs here.
@RequiredPermissions({})
public class UpdateDatasetTypeLinksWithMetadataBlocks extends AbstractVoidCommand {

    // The dataset type whose metadata block links are being replaced.
    private final DatasetType datasetType;
    // The complete new set of blocks to link; an empty list clears all links.
    private final List<MetadataBlock> metadataBlocks;

    public UpdateDatasetTypeLinksWithMetadataBlocks(DataverseRequest dataverseRequest, DatasetType datasetType, List<MetadataBlock> metadataBlocks) {
        super(dataverseRequest, (DvObject) null);
        this.datasetType = datasetType;
        this.metadataBlocks = metadataBlocks;
    }

    @Override
    protected void executeImpl(CommandContext ctxt) throws CommandException {
        // Overwrite the association list and merge so JPA writes the join table.
        datasetType.setMetadataBlocks(metadataBlocks);
        ctxt.em().merge(datasetType);
    }

}
Original file line number Diff line number Diff line change
Expand Up @@ -9,14 +9,19 @@
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Logger;
import java.util.stream.Collectors;

// inspired by UpdateDataverseMetadataBlocksCommand and DeactivateUserCommand
@RequiredPermissions({})
public class UpdateMetadataBlockDatasetTypeAssociations extends AbstractCommand<MetadataBlock> {

private static final Logger logger = Logger.getLogger(UpdateMetadataBlockDatasetTypeAssociations.class.getCanonicalName());

private DataverseRequest dataverseRequest;
private MetadataBlock metadataBlock;
private List<DatasetType> datasetTypes;
Expand All @@ -30,22 +35,80 @@ public UpdateMetadataBlockDatasetTypeAssociations(DataverseRequest dataverseRequ

@Override
public MetadataBlock execute(CommandContext ctxt) throws CommandException {
if (true) {
logger.info("exiting early");
// metadataBlock.setDatasetTypes(datasetTypes);
MetadataBlock savedMetadataBlock = ctxt.em().merge(metadataBlock);
return savedMetadataBlock;
}
if (!getUser().isSuperuser()) {
throw new PermissionException("Command can only be called by superusers.", this, null, null);
}
metadataBlock.setDatasetTypes(datasetTypes);
MetadataBlock savedMetadataBlock = ctxt.em().merge(metadataBlock);
// logger.info("before changing anything, block " + metadataBlock.getName() + " has these associations: "
// + metadataBlock.getDatasetTypes().stream()
// .map(DatasetType::getName)
// .collect(Collectors.joining(", ")));
// logger.info("about to set these types: " + datasetTypes.stream()
// .map(DatasetType::getName)
// .collect(Collectors.joining(", ")));
// metadataBlock.setDatasetTypes(datasetTypes);
// MetadataBlock savedMetadataBlock = ctxt.em().merge(metadataBlock);
if (datasetTypes.isEmpty()) {
// clear out all dataset types from metadata block
logger.info("dataset types is empty! clearing out");
// for (DatasetType datasetType : datasetTypes) {
// List<MetadataBlock> existing = datasetType.getMetadataBlocks();
// List<MetadataBlock> minusOne = existing;
// for (MetadataBlock mdb : existing) {
// if (mdb.equals(metadataBlock)) {
// minusOne.remove(mdb);
// datasetType.setMetadataBlocks(minusOne);
// if (true) {
// // just a test
// logger.info("just a test... sets to empty list");
// datasetType.setMetadataBlocks(new ArrayList<>());
// ctxt.em().merge(datasetType);
// }
// }
// }
// }
// MetadataBlock savedMetadataBlock2 = ctxt.em().merge(savedMetadataBlock);
logger.info("returning from is empty");
return null;
// return savedMetadataBlock2;
} else {
// set incoming dataset types for this metadatablock
logger.info("datasetTypes was not empty");
return null;

// MetadataBlock savedMetadataBlock = ctxt.em().merge(metadataBlock);
// ctxt.em().flush();
for (DatasetType datasetType : savedMetadataBlock.getDatasetTypes()) {
System.out.println("type: " + datasetType.getName());
List<MetadataBlock> blocks = datasetType.getMetadataBlocks();
blocks.add(metadataBlock);
// We filter the list through a set, so that all blocks are distinct.
datasetType.setMetadataBlocks(new LinkedList<>(new HashSet<>(blocks)));
ctxt.em().merge(datasetType);
// for (DatasetType datasetType : savedMetadataBlock.getDatasetTypes()) {
// System.out.println("type: " + datasetType.getName());
// List<MetadataBlock> blocks = datasetType.getMetadataBlocks();
// blocks.add(metadataBlock);
// // We filter the list through a set, so that all blocks are distinct.
// datasetType.setMetadataBlocks(new LinkedList<>(new HashSet<>(blocks)));
// ctxt.em().merge(datasetType);
// }
// // TODO save the block one more time?
//// return savedMetadataBlock;
// MetadataBlock savedMetadataBlock2 = ctxt.em().merge(savedMetadataBlock);
// logger.info("returning from not empty");
// return savedMetadataBlock2;
}
// TODO save the block one more time?
return savedMetadataBlock;
// MetadataBlock savedMetadataBlock = ctxt.em().merge(metadataBlock);
//// ctxt.em().flush();
// for (DatasetType datasetType : savedMetadataBlock.getDatasetTypes()) {
// System.out.println("type: " + datasetType.getName());
// List<MetadataBlock> blocks = datasetType.getMetadataBlocks();
// blocks.add(metadataBlock);
// // We filter the list through a set, so that all blocks are distinct.
// datasetType.setMetadataBlocks(new LinkedList<>(new HashSet<>(blocks)));
// ctxt.em().merge(datasetType);
// }
// // TODO save the block one more time?
// return savedMetadataBlock;
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -658,9 +658,10 @@ public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean printO

jsonObjectBuilder.add("fields", fieldsBuilder);
JsonArrayBuilder jab = Json.createArrayBuilder();
for (DatasetType datasetType : metadataBlock.getDatasetTypes()) {
jab.add(datasetType.getName());
}
// for (DatasetType datasetType : metadataBlock.getDatasetTypes()) {
// System.out.println("in jsonPrinter, for block " + metadataBlock.getName() + " adding " + datasetType.getName());
// jab.add(datasetType.getName());
// }
jsonObjectBuilder.add("associatedDatasetTypes", jab);
return jsonObjectBuilder;
}
Expand Down
Loading

0 comments on commit aa24a7a

Please sign in to comment.