Skip to content

Commit

Permalink
Restructure schema to map
Browse files Browse the repository at this point in the history
  • Loading branch information
wwelling committed Jul 28, 2023
1 parent e3403a2 commit 3c75b99
Show file tree
Hide file tree
Showing 4 changed files with 61 additions and 26 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,9 @@ public Map<String, List<NamedTypedField>> getScaffold() {
return SCAFFOLD;
}

public List<Map<String, Object>> getSchema() {
// TODO: type out the response from Solr or not
public Map<String, Object> getSchema() {
Map<String, Object> schema = new HashMap<>();
Optional<FieldsResponse> fieldsRes = Optional.empty();

Optional<Object[]> response = Optional.ofNullable(this.ping());
Expand All @@ -104,8 +106,15 @@ public List<Map<String, Object>> getSchema() {
}

if (fieldsRes.isPresent()) {
return fieldsRes.map(fr -> fr.getFields())
.get();
fieldsRes.map(fr -> fr.getFields())
.get()
.stream()
.forEach(field -> {
String name = (String) field.get("name");
field.remove("name");
schema.put(name, field);
});
return schema;
}

throw new RuntimeException("fields request failed");
Expand Down Expand Up @@ -135,15 +144,11 @@ public void startup() {
if (status == 0) {
logger.info("Initializing index fields for {}", index.getName());

Map<String, Object> details = new HashMap<String, Object>();

details.put("schema", getSchema());

// suspecting some issues without shallow clone of response from Solr
Map<String, Object> schema = getSchema();

indexers.stream().forEach(indexer -> {
logger.info("Initializing fields for {}", indexer.name());
indexer.init((List<Map<String, Object>>) details.get("schema"));
indexer.init(schema);
});
} else {
logger.warn("Unable to connect to Solr collection {}", index.getName());
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package edu.tamu.scholars.middleware.discovery.service.component;

import java.util.Collection;
import java.util.List;
import java.util.Map;

import edu.tamu.scholars.middleware.discovery.model.AbstractIndexDocument;
Expand All @@ -19,7 +18,7 @@ public interface Indexer {
/**
* Everything the application needs the solr collection to have specified.
*/
public void init(List<Map<String, Object>> schema);
public void init(Map<String, Object> schema);

/**
* Index a batch of abstract index documents.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,4 +11,9 @@ public class NamedTypedField {
public String name;
public FieldType fieldType;
public Field field;

// Identify the field by its declared name — keeps log output readable
// when NamedTypedField instances are printed during index scaffolding.
@Override
public String toString() {
    return this.name;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.schema.SchemaRequest;
Expand Down Expand Up @@ -55,7 +56,7 @@ public void scaffold() {
}

@Override
public void init(List<Map<String, Object>> schema) {
public void init(Map<String, Object> schema) {
if (!index.isInitOnStartup()) {
return;
}
Expand All @@ -65,24 +66,49 @@ public void init(List<Map<String, Object>> schema) {
.filter(ntf -> CREATED_FIELDS.add(ntf.name))
.forEach(ntf -> {

logger.info("Adding field {}.{}", this.name(), ntf.name);

// check if field is an existing property in schema
SchemaRequest.AddField addFieldRequest = SolrSchemaUtility.addFieldRequest(ntf);
Map<String, Object> field = (Map<String, Object>) schema.get(ntf.name);
if (Objects.nonNull(field)) {

logger.info("Field {}.{} already exists", this.name(), ntf.name);

// TODO: type these maps or not
// TODO: match version and declared field type in case field type definition changes
if (!field.get("type").equals(ntf.fieldType.type())) {
logger.error("Scaffold to Index type mismatch!!");
logger.debug("\tntf.name: " + ntf.name);
logger.debug("\tntf.fieldType.readonly: " + ntf.fieldType.readonly());
logger.debug("\tntf.fieldType.stored: " + ntf.fieldType.stored());
logger.debug("\tntf.fieldType.searchable: " + ntf.fieldType.searchable());
logger.debug("\tntf.fieldType.type: " + ntf.fieldType.type());
logger.debug("\tntf.fieldType.copyTo: " + ntf.fieldType.copyTo());
logger.debug("\tntf.fieldType.defaultValue: " + ntf.fieldType.defaultValue());
logger.debug("\tntf.fieldType.required: " + ntf.fieldType.required());
logger.debug("\tntf.fieldType.name: " + ntf.fieldType.name());
logger.debug("\tntf.fieldType.value: " + ntf.fieldType.value());

logger.debug("\tfield: " + field);
}
} else {
// create field and copy fields
logger.info("Adding new field {}.{}", this.name(), ntf.name);

try {
addFieldRequest.process(solrClient, COLLECTION);
} catch (Exception e) {
logger.debug("Failed to add field", e);
}
// check if field is an existing property in schema
SchemaRequest.AddField addFieldRequest = SolrSchemaUtility.addFieldRequest(ntf);

if (ntf.fieldType.copyTo().length > 0) {
logger.info("Adding copy field {}.{} => {}", this.name(), ntf.name, Arrays.asList(ntf.fieldType.copyTo()));
SchemaRequest.AddCopyField addCopyFieldRequest = SolrSchemaUtility.addCopyFieldRequest(ntf);
try {
addCopyFieldRequest.process(solrClient, COLLECTION);
addFieldRequest.process(solrClient, COLLECTION);
} catch (Exception e) {
logger.debug("Failed to add copy field", e);
logger.debug("Failed to add field", e);
}

if (ntf.fieldType.copyTo().length > 0) {
logger.info("Adding copy field {}.{} => {}", this.name(), ntf.name, Arrays.asList(ntf.fieldType.copyTo()));
SchemaRequest.AddCopyField addCopyFieldRequest = SolrSchemaUtility.addCopyFieldRequest(ntf);
try {
addCopyFieldRequest.process(solrClient, COLLECTION);
} catch (Exception e) {
logger.debug("Failed to add copy field", e);
}
}
}
});
Expand Down

0 comments on commit 3c75b99

Please sign in to comment.