diff --git a/api/src/org/labkey/api/ApiModule.java b/api/src/org/labkey/api/ApiModule.java index d613909b2b6..5fe570e469b 100644 --- a/api/src/org/labkey/api/ApiModule.java +++ b/api/src/org/labkey/api/ApiModule.java @@ -38,6 +38,7 @@ import org.labkey.api.collections.CaseInsensitiveHashSet; import org.labkey.api.collections.CaseInsensitiveMapWrapper; import org.labkey.api.collections.CollectionUtils; +import org.labkey.api.collections.DeltaTrackingMap; import org.labkey.api.collections.LabKeyCollectors; import org.labkey.api.collections.Sampler; import org.labkey.api.collections.SwapQueue; @@ -384,6 +385,7 @@ public void registerServlets(ServletContext servletCtx) DatabaseCache.TestCase.class, DateUtil.TestCase.class, DbScope.DialectTestCase.class, + DeltaTrackingMap.TestCase.class, DetailsURL.TestCase.class, DiskCachingDataIterator.DiskTestCase.class, EmailTemplate.TestCase.class, diff --git a/api/src/org/labkey/api/collections/DeltaTrackingMap.java b/api/src/org/labkey/api/collections/DeltaTrackingMap.java new file mode 100644 index 00000000000..503b86afa1c --- /dev/null +++ b/api/src/org/labkey/api/collections/DeltaTrackingMap.java @@ -0,0 +1,486 @@ +package org.labkey.api.collections; + +import org.jetbrains.annotations.NotNull; +import org.junit.Assert; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Set; + +/** + * A lightweight Map proxy that tracks structural modifications (additions and removals) + * without requiring deep copies or full iteration of the underlying map. + *

+ * This class is highly optimized for "happy path" data-processing scenarios where structural + * changes are rare. Tracking sets are allocated lazily only upon the first actual addition + * or removal. Standard value updates to existing keys pass through with zero allocation overhead. + *

+ * Example Usage: + *

{@code
+ * Map<String, String> baseRow = new CaseInsensitiveHashMap<>();
+ * baseRow.put("ColumnA", "Value1");
+ *
+ * DeltaTrackingMap<String> trackedRow = new DeltaTrackingMap<>(baseRow);
+ *
+ * // Updating an existing key (zero tracking overhead)
+ * trackedRow.put("ColumnA", "NewValue");
+ *
+ * // Adding a new key
+ * trackedRow.put("ColumnB", "Value2");
+ *
+ * // Removing a key
+ * trackedRow.remove("ColumnA");
+ *
+ * if (trackedRow.hasStructuralChanges())
+ * {
+ *     Set<String> added = trackedRow.getAddedKeys();     // Contains ["ColumnB"]
+ *     Set<String> removed = trackedRow.getRemovedKeys(); // Contains ["ColumnA"]
+ *
+ *     // Reset tracking state if you need to pass the map to another processor
+ *     trackedRow.resetTracking();
+ * }
+ * }
+ *
+ * @param <V> the type of mapped values
+ */
+public class DeltaTrackingMap<V> implements Map<String, V>
+{
+    private final Map<String, V> delegate;
+    private Set<String> added = null;
+    private Set<String> removed = null;
+
+    public DeltaTrackingMap(Map<String, V> delegate)
+    {
+        this.delegate = delegate;
+    }
+
+    @Override
+    public V put(String key, V value)
+    {
+        boolean isNew = !delegate.containsKey(key);
+        V prev = delegate.put(key, value);
+
+        if (isNew)
+        {
+            // If it was previously removed, an add operation cancels out the removal
+            if (removed != null && removed.contains(key))
+                removed.remove(key);
+            else
+            {
+                if (added == null)
+                    added = Sets.newCaseInsensitiveHashSet();
+                added.add(key);
+            }
+        }
+
+        return prev;
+    }
+
+    @Override
+    public V remove(Object key)
+    {
+        boolean exists = delegate.containsKey(key);
+        V prev = delegate.remove(key);
+
+        if (exists && key instanceof String strKey)
+        {
+            // If it was just added, a remove cancels out the add operation
+            if (added != null && added.contains(strKey))
+                added.remove(strKey);
+            else
+            {
+                if (removed == null)
+                    removed = Sets.newCaseInsensitiveHashSet();
+                removed.add(strKey);
+            }
+        }
+
+        return prev;
+    }
+
+    @Override
+    public void putAll(Map<? extends String, ? extends V> m)
+    {
+        for (Entry<? extends String, ? extends V> entry : m.entrySet())
+            put(entry.getKey(), entry.getValue());
+    }
+
+    @Override
+    public void clear()
+    {
+        // Snapshot keys first to avoid ConcurrentModificationException during removal
+        for (String key : new ArrayList<>(delegate.keySet()))
+            remove(key);
+    }
+
+    // --- State Checking and Reset ---
+
+    public boolean hasStructuralChanges()
+    {
+        return (added != null && !added.isEmpty()) || (removed != null && !removed.isEmpty());
+    }
+
+    public Set<String> getAddedKeys()
+    {
+        return added != null ? Collections.unmodifiableSet(added) : Collections.emptySet();
+    }
+
+    public Set<String> getRemovedKeys()
+    {
+        return removed != null ? Collections.unmodifiableSet(removed) : Collections.emptySet();
+    }
+
+    public void resetTracking()
+    {
+        if (added != null) added.clear();
+        if (removed != null) removed.clear();
+    }
+
+    // --- Standard Delegated Methods ---
+
+    @Override
+    public int size()
+    {
+        return delegate.size();
+    }
+
+    @Override
+    public boolean isEmpty()
+    {
+        return delegate.isEmpty();
+    }
+
+    @Override
+    public boolean containsKey(Object key)
+    {
+        return delegate.containsKey(key);
+    }
+
+    @Override
+    public boolean containsValue(Object value)
+    {
+        return delegate.containsValue(value);
+    }
+
+    @Override
+    public V get(Object key)
+    {
+        return delegate.get(key);
+    }
+
+    /**
+     * Returns an unmodifiable view of the key set. Use {@link #put} and {@link #remove} to
+     * mutate the map so that structural changes are correctly tracked.
+     */
+    @Override
+    public @NotNull Set<String> keySet()
+    {
+        return Collections.unmodifiableSet(delegate.keySet());
+    }
+
+    /**
+     * Returns an unmodifiable view of the values collection. Use {@link #put} and {@link #remove} to
+     * mutate the map so that structural changes are correctly tracked.
+     */
+    @Override
+    public @NotNull Collection<V> values()
+    {
+        return Collections.unmodifiableCollection(delegate.values());
+    }
+
+    /**
+     * Returns an unmodifiable view of the entry set. Use {@link #put} and {@link #remove} to
+     * mutate the map so that structural changes are correctly tracked.
+     */
+    @Override
+    public @NotNull Set<Entry<String, V>> entrySet()
+    {
+        return Collections.unmodifiableSet(delegate.entrySet());
+    }
+
+    public static class TestCase extends Assert
+    {
+        private static DeltaTrackingMap<String> createTracker()
+        {
+            Map<String, String> baseMap = new CaseInsensitiveHashMap<>();
+            baseMap.put("ExistingKey1", "Value1");
+            baseMap.put("ExistingKey2", "Value2");
+            baseMap.put("ExistingKey3", "Value3");
+            return new DeltaTrackingMap<>(baseMap);
+        }
+
+        @Test
+        public void testNoStructuralChanges()
+        {
+            DeltaTrackingMap<String> map = createTracker();
+
+            // Updating an existing key should not trigger structural changes
+            map.put("ExistingKey1", "NewValue1");
+            // Case-insensitive update of an existing key
+            map.put("existingkey2", "NewValue2");
+
+            assertFalse("Updates to existing keys should not flag structural changes", map.hasStructuralChanges());
+            assertTrue(map.getAddedKeys().isEmpty());
+            assertTrue(map.getRemovedKeys().isEmpty());
+        }
+
+        @Test
+        public void testAdditions()
+        {
+            DeltaTrackingMap<String> map = createTracker();
+
+            map.put("NewKey1", "NewValue");
+            assertTrue("Adding a key should flag structural changes", map.hasStructuralChanges());
+            assertTrue(map.getAddedKeys().contains("NewKey1"));
+            assertTrue(map.getRemovedKeys().isEmpty());
+
+            // Case-insensitive check of the tracking set
+            assertTrue(map.getAddedKeys().contains("newkey1"));
+        }
+
+        @Test
+        public void testRemovals()
+        {
+            DeltaTrackingMap<String> map = createTracker();
+
+            map.remove("ExistingKey1");
+            assertTrue("Removing a key should flag structural changes", map.hasStructuralChanges());
+            assertTrue(map.getRemovedKeys().contains("ExistingKey1"));
+            assertTrue(map.getAddedKeys().isEmpty());
+
+            // Case-insensitive remove and check
+            map.remove("existingkey2");
+            assertTrue(map.getRemovedKeys().contains("ExistingKey2"));
+            assertEquals(2, map.getRemovedKeys().size());
+        }
+
+        @Test
+        public void testCancellations()
+        {
+            DeltaTrackingMap<String> map = createTracker();
+
+            // Remove then Add (Cancels the removal)
+            map.remove("ExistingKey1");
+            assertTrue(map.getRemovedKeys().contains("ExistingKey1"));
+
+            map.put("ExistingKey1", "RestoredValue");
+            assertFalse("Adding back a removed key should cancel the structural change", map.hasStructuralChanges());
+            assertTrue(map.getRemovedKeys().isEmpty());
+            assertTrue(map.getAddedKeys().isEmpty());
+
+            // Add then Remove (Cancels the addition)
+            map.put("TemporaryKey", "TempValue");
+            assertTrue(map.getAddedKeys().contains("TemporaryKey"));
+
+            map.remove("TemporaryKey");
+            assertFalse("Removing a newly added key should cancel the structural change", map.hasStructuralChanges());
+            assertTrue(map.getRemovedKeys().isEmpty());
+            assertTrue(map.getAddedKeys().isEmpty());
+        }
+
+        @Test
+        public void testBulkOperations()
+        {
+            DeltaTrackingMap<String> map = createTracker();
+
+            // test putAll()
+            Map<String, String> bulkAdds = new CaseInsensitiveHashMap<>();
+            bulkAdds.put("ExistingKey1", "UpdatedValue"); // Update
+            bulkAdds.put("NewBulk1", "V1");               // Add
+            bulkAdds.put("NewBulk2", "V2");               // Add
+
+            map.putAll(bulkAdds);
+            assertTrue(map.hasStructuralChanges());
+            assertEquals("Should only track the 2 new keys", 2, map.getAddedKeys().size());
+            assertTrue(map.getAddedKeys().contains("NewBulk1"));
+            assertTrue(map.getAddedKeys().contains("NewBulk2"));
+
+            map.resetTracking();
+
+            // test clear()
+            map.clear();
+            assertTrue(map.hasStructuralChanges());
+            assertEquals("Should track all keys removed during clear", 5, map.getRemovedKeys().size());
+            assertTrue(map.isEmpty());
+        }
+
+        @Test
+        public void testResetTracking()
+        {
+            DeltaTrackingMap<String> map = createTracker();
+
+            map.put("NewKey", "Val");
+            map.remove("ExistingKey1");
+
+            assertTrue(map.hasStructuralChanges());
+            assertEquals(1, map.getAddedKeys().size());
+            assertEquals(1, map.getRemovedKeys().size());
+
+            map.resetTracking();
+
+            assertFalse(map.hasStructuralChanges());
+            assertTrue(map.getAddedKeys().isEmpty());
+            assertTrue(map.getRemovedKeys().isEmpty());
+        }
+
+        @Test
+        public void testEncapsulationOfTrackingSets()
+        {
+            DeltaTrackingMap<String> map = createTracker();
+            map.put("NewKey", "Val");
+
+            Set<String> addedKeys = map.getAddedKeys();
+            try
+            {
+                addedKeys.remove("NewKey");
+                fail("getAddedKeys() should return an unmodifiable set to prevent internal state corruption.");
+            }
+            catch (UnsupportedOperationException e)
+            {
+                // Expected behavior
+            }
+        }
+
+        @Test
+        public void testMapViewsAreUnmodifiable()
+        {
+            DeltaTrackingMap<String> map = createTracker();
+
+            // keySet().remove() must not bypass tracking
+            try
+            {
+                map.keySet().remove("ExistingKey1");
+                fail("keySet() should return an unmodifiable view.");
+            }
+            catch (UnsupportedOperationException e)
+            {
+                // Expected behavior
+            }
+            assertFalse("keySet().remove() must not have modified the map", map.hasStructuralChanges());
+            assertTrue(map.containsKey("ExistingKey1"));
+
+            // entrySet() iterator remove() must not bypass tracking
+            try
+            {
+                map.entrySet().iterator().remove();
+                fail("entrySet() should return an unmodifiable view.");
+            }
+            catch (UnsupportedOperationException e)
+            {
+                // Expected behavior
+            }
+
+            // values() remove() must not bypass tracking
+            try
+            {
+                map.values().remove("Value1");
+                fail("values() should return an unmodifiable view.");
+            }
+            catch (UnsupportedOperationException e)
+            {
+                // Expected behavior
+            }
+            assertFalse("No mutations should have been tracked", map.hasStructuralChanges());
+        }
+
+        @Test
+        public void testCrossCaseCancellations()
+        {
+            DeltaTrackingMap<String> map = createTracker();
+
+            // 1. Remove Exact, Add Lowercase (Cancels the removal)
+            map.remove("ExistingKey1");
+            map.put("existingkey1", "RestoredValue"); // Different casing than original
+
+            assertFalse("Adding back a removed key with different casing should cancel the structural change", map.hasStructuralChanges());
+            assertTrue(map.getRemovedKeys().isEmpty());
+            assertTrue(map.getAddedKeys().isEmpty());
+
+            // 2. Add Exact, Remove Lowercase (Cancels the addition)
+            map.put("TemporaryKey", "TempValue");
+            map.remove("temporarykey"); // Different casing than addition
+
+            assertFalse("Removing a newly added key with different casing should cancel the structural change", map.hasStructuralChanges());
+            assertTrue(map.getRemovedKeys().isEmpty());
+            assertTrue(map.getAddedKeys().isEmpty());
+        }
+
+        @Test
+        public void testNullValues()
+        {
+            DeltaTrackingMap<String> map = createTracker();
+
+            // Adding a new key with a null value
+            map.put("NullValueKey", null);
+            assertTrue(map.hasStructuralChanges());
+            assertTrue(map.getAddedKeys().contains("NullValueKey"));
+
+            // Updating an existing key to null
+            map.put("ExistingKey1", null);
+            assertEquals("Updating existing key to null should not trigger structural changes", 1, map.getAddedKeys().size());
+            assertTrue(map.getRemovedKeys().isEmpty());
+        }
+
+        @Test
+        public void testNonStringRemoval()
+        {
+            DeltaTrackingMap<String> map = createTracker();
+
+            // Verify passing a non-String doesn't throw a ClassCastException.
+            String result = map.remove(12345);
+            assertNull(result);
+            assertFalse("Removing a non-existent, non-string key should do nothing", map.hasStructuralChanges());
+        }
+
+        @Test
+        public void testWithLinkedHashMap()
+        {
+            // Use a standard, case-sensitive map which preserves insertion order
+            Map<String, String> baseMap = new LinkedHashMap<>();
+            baseMap.put("FirstKey", "V1");
+            baseMap.put("SecondKey", "V2");
+
+            DeltaTrackingMap<String> map = new DeltaTrackingMap<>(baseMap);
+
+            // Verify standard tracking works normally
+            map.put("ThirdKey", "V3");
+            map.remove("FirstKey");
+
+            assertTrue(map.hasStructuralChanges());
+            assertEquals(1, map.getAddedKeys().size());
+            assertTrue(map.getAddedKeys().contains("ThirdKey"));
+
+            assertEquals(1, map.getRemovedKeys().size());
+            assertTrue(map.getRemovedKeys().contains("FirstKey"));
+
+            // Verify underlying map features (iteration order) are preserved
+            map.put("FourthKey", "V4");
+
+            String[] expectedOrder = {"SecondKey", "ThirdKey", "FourthKey"};
+            int i = 0;
+            for (String key : map.keySet())
+            {
+                assertEquals("Iteration order should match LinkedHashMap's insertion order", expectedOrder[i], key);
+                i++;
+            }
+
+            map.resetTracking();
+
+            map.put("secondkey", "V-lower");
+
+            assertEquals("LinkedHashMap treats different casing as distinct keys", 4, map.size());
+            assertTrue("Tracker flagged the new key", map.getAddedKeys().contains("secondkey"));
+
+            map.remove("SecondKey");
+
+            assertEquals(3, map.size());
+            assertFalse(map.containsKey("SecondKey"));
+            assertTrue(map.containsKey("secondkey"));
+            assertFalse("Tracker lost the state because case-insensitive matching cancelled out the add/remove", map.hasStructuralChanges());
+        }
+    }
+}
\ No newline at end of file
diff --git a/api/src/org/labkey/api/data/AbstractTableInfo.java b/api/src/org/labkey/api/data/AbstractTableInfo.java
index 4ed45a77eb9..53edf288e16 100644
--- a/api/src/org/labkey/api/data/AbstractTableInfo.java
+++ b/api/src/org/labkey/api/data/AbstractTableInfo.java
@@ -27,9 +27,12 @@
 import org.junit.Test;
 import org.labkey.api.cache.CacheManager;
 import org.labkey.api.collections.CaseInsensitiveHashMap;
+import org.labkey.api.collections.CaseInsensitiveHashSet;
 import org.labkey.api.collections.CaseInsensitiveMapWrapper;
 import org.labkey.api.collections.CaseInsensitiveTreeSet;
+import org.labkey.api.collections.DeltaTrackingMap;
 import org.labkey.api.collections.NamedObjectList;
+import org.labkey.api.collections.Sets;
 import org.labkey.api.data.dialect.SqlDialect;
 import org.labkey.api.data.triggers.ScriptTriggerFactory;
 import org.labkey.api.data.triggers.Trigger;
@@ -1884,6 +1887,28 @@ public final boolean canStreamTriggers(Container c)
         return true;
     }
 
+    @Override
+    public @Nullable Set<String> getTriggerManagedColumns(@Nullable Container c, QueryUpdateService.InsertOption insertOption)
+    {
+        var triggers = getTriggers(c);
+        if (triggers.isEmpty())
+            return null;
+
+        var columns = new CaseInsensitiveHashSet();
+        for (var trigger : triggers)
+        {
+            var managedColumns = trigger.getManagedColumns();
+            if (managedColumns == null)
+                continue;
+
+            if (insertOption.updateOnly)
+                columns.addAll(managedColumns.update());
+            else
+                columns.addAll(managedColumns.insert());
+        }
+
+        return columns.isEmpty() ? null : columns;
+    }
 
     private Collection<Trigger> _triggers = null;
 
@@ -1926,7 +1951,7 @@ public final void resetTriggers(Container c)
     }
 
     @Override
-    public final void fireBatchTrigger(Container c, User user, TriggerType type, boolean before, BatchValidationException batchErrors, Map<String, Object> extraContext)
+    public final void fireBatchTrigger(Container c, User user, TriggerType type, @Nullable QueryUpdateService.InsertOption insertOption, boolean before, BatchValidationException batchErrors, Map<String, Object> extraContext)
             throws BatchValidationException
     {
         assert batchErrors != null;
@@ -1945,9 +1970,19 @@ public final void fireBatchTrigger(Container c, User user, TriggerType type, boo
     }
 
     @Override
-    public void fireRowTrigger(Container c, User user, TriggerType type, boolean before, int rowNumber,
-                               @Nullable Map<String, Object> newRow, @Nullable Map<String, Object> oldRow, Map<String, Object> extraContext, @Nullable Map<String, Object> existingRecord)
-            throws ValidationException
+    public void fireRowTrigger(
+        Container c,
+        User user,
+        TriggerType type,
+        @Nullable QueryUpdateService.InsertOption insertOption,
+        boolean before,
+        int rowNumber,
+        @Nullable Map<String, Object> newRow,
+        @Nullable Map<String, Object> oldRow,
+        Map<String, Object> extraContext,
+        @Nullable Map<String, Object> existingRecord,
+        boolean manageColumns
+    ) throws ValidationException
     {
         ValidationException errors = new ValidationException();
         errors.setSchemaName(getPublicSchemaName());
@@ -1957,9 +1992,81 @@ public void fireRowTrigger(Container c, User user, TriggerType type, boolean bef
 
         Collection<Trigger> triggers = getTriggers(c);
 
+        // Wrap the row once before the loop
+        DeltaTrackingMap<Object> trackedRow = null;
+        Map<String, Object> newRowTracked = newRow;
+
+        if (newRow != null && before)
+        {
+            trackedRow = new DeltaTrackingMap<>(newRow);
+            newRowTracked = trackedRow;
+        }
+
         for (Trigger script : triggers)
         {
-            script.rowTrigger(this, c, user, type, before, rowNumber, newRow, oldRow, errors, extraContext, existingRecord);
+            if (trackedRow != null)
+                trackedRow.resetTracking();
+
+            script.rowTrigger(this, c, user, type, insertOption, before, rowNumber, newRowTracked, oldRow, errors, extraContext, existingRecord);
+            if (errors.hasErrors())
+                break;
+
+            if (trackedRow != null)
+            {
+                var managed = script.getManagedColumns();
+                var managedCols = managed != null ? managed.getColumns(type) : null;
+
+                // Verify the trigger did not add or remove columns that are not managed
+                if (trackedRow.hasStructuralChanges())
+                {
+                    var added = Sets.newCaseInsensitiveHashSet(trackedRow.getAddedKeys());
+                    var removed = Sets.newCaseInsensitiveHashSet(trackedRow.getRemovedKeys());
+
+                    if (managedCols != null)
+                    {
+                        added.removeAll(managedCols);
+                        removed.removeAll(managedCols);
+                        if (managed.ignored() != null)
+                        {
+                            added.removeAll(managed.ignored());
+                            removed.removeAll(managed.ignored());
+                        }
+                    }
+
+                    if (!added.isEmpty() || !removed.isEmpty())
+                    {
+                        var diffs = new ArrayList<String>();
+
+                        if (!added.isEmpty())
+                            diffs.add("add: " + added.stream().map(col -> "'" + col + "'").collect(Collectors.joining(", ")));
+                        if (!removed.isEmpty())
+                            diffs.add("remove: " + removed.stream().map(col -> "'" + col + "'").collect(Collectors.joining(", ")));
+
+                        String message = "Trigger '" + script.getName() + "' attempted to " + String.join(", ", diffs) + ". Declare managed columns to include them in the column set.";
+                        if (manageColumns)
+                            errors.addGlobalError(message);
+                        else
+                            LOG.warn(message + " This will be an error if invoked via data iteration.");
+                    }
+                }
+
+                // Verify the trigger handles every managed column
+                if (managedCols != null)
+                {
+                    for (var col : managedCols)
+                    {
+                        if (!newRowTracked.containsKey(col))
+                        {
+                            String message = "Trigger '" + script.getName() + "' declared the managed column '" + col + "' but did not set a value for it. Set null to clear or provide a value.";
+                            if (manageColumns)
+                                errors.addFieldError(col, message);
+                            else
+                                LOG.warn(message + " This will be an error if invoked via data iteration.");
+                        }
+                    }
+                }
+            }
+
             if (errors.hasErrors())
                 break;
         }
diff --git a/api/src/org/labkey/api/data/SchemaTableInfo.java b/api/src/org/labkey/api/data/SchemaTableInfo.java
index a4db436d931..d8c2eb9daca 100644
--- a/api/src/org/labkey/api/data/SchemaTableInfo.java
+++ b/api/src/org/labkey/api/data/SchemaTableInfo.java
@@ -907,13 +907,13 @@ public Collection getNamedParameters()
     }
 
     @Override
-    public void fireBatchTrigger(Container c, User user, TriggerType type, boolean before, BatchValidationException errors, Map<String, Object> extraContext)
+    public void fireBatchTrigger(Container c, User user, TriggerType type, @Nullable QueryUpdateService.InsertOption insertOption, boolean before, BatchValidationException errors, Map<String, Object> extraContext)
     {
         throw new UnsupportedOperationException("Table triggers not yet supported on schema tables");
     }
 
     @Override
-    public void fireRowTrigger(Container c, User user, TriggerType type, boolean before, int rowNumber, Map<String, Object> newRow, Map<String, Object> oldRow, Map<String, Object> extraContext, @Nullable Map<String, Object> existingRecord)
+    public void fireRowTrigger(Container c, User user, TriggerType type, @Nullable QueryUpdateService.InsertOption insertOption, boolean before, int rowNumber, Map<String, Object> newRow, Map<String, Object> oldRow, Map<String, Object> extraContext, @Nullable Map<String, Object> existingRecord, boolean manageColumns)
     {
         throw new UnsupportedOperationException("Table triggers not yet supported on schema tables");
     }
diff --git a/api/src/org/labkey/api/data/TableInfo.java b/api/src/org/labkey/api/data/TableInfo.java
index 1ff06b90cc5..d9f9078a775 100644
--- a/api/src/org/labkey/api/data/TableInfo.java
+++ b/api/src/org/labkey/api/data/TableInfo.java
@@ -40,6 +40,7 @@
 import org.labkey.api.security.HasPermission;
 import org.labkey.api.security.User;
 import org.labkey.api.security.permissions.ReadPermission;
+import org.labkey.api.settings.AppProps;
 import org.labkey.api.util.ContainerContext;
 import org.labkey.api.util.Pair;
 import org.labkey.api.util.Path;
@@ -508,7 +509,7 @@ enum TriggerMethod
 
     /**
      * Executes any trigger scripts for this table.
-     *
+     * <p>

* The trigger should be called once before and once after an entire set of rows for each of the * INSERT, UPDATE, DELETE trigger types. A trigger script may set up data structures to be used * during validation. In particular, the trigger script might want to do a query to populate a set of @@ -555,19 +556,22 @@ enum TriggerMethod * @param c The current Container. * @param user the current user * @param type The TriggerType for the event. + * @param insertOption The insertOption that invoked this trigger. Will be null when invoked outside a data iterator. * @param before true if the trigger is before the event, false if after the event. * @param errors Any errors created by the validation script will be added to the errors collection. * @param extraContext Optional additional bindings to set in the script's context when evaluating. * @throws BatchValidationException if the trigger function returns false or the errors map isn't empty. */ - void fireBatchTrigger(Container c, User user, TriggerType type, boolean before, BatchValidationException errors, Map extraContext) + void fireBatchTrigger(Container c, User user, TriggerType type, @Nullable QueryUpdateService.InsertOption insertOption, boolean before, BatchValidationException errors, Map extraContext) throws BatchValidationException; default void fireRowTrigger(Container c, User user, TriggerType type, boolean before, int rowNumber, - @Nullable Map newRow, @Nullable Map oldRow, Map extraContext) + @Nullable Map newRow, @Nullable Map oldRow, Map extraContext) throws ValidationException { - fireRowTrigger(c, user, type, before, rowNumber, newRow, oldRow, extraContext, null); + // In production, columns are not managed for non-data iterator invoked triggers and will log a warning. + // In development, columns are managed for all non-data iterator triggers and will throw an error. 
+ fireRowTrigger(c, user, type, null, before, rowNumber, newRow, oldRow, extraContext, null, AppProps.getInstance().isDevMode() && QueryService.get().isTriggerManagedColumnsEnabled()); } /** @@ -600,16 +604,28 @@ default void fireRowTrigger(Container c, User user, TriggerType type, boolean be * @param c The current Container. * @param user the current user * @param type The TriggerType for the event. + * @param insertOption The insertOption that invoked this trigger. Will be null when invoked outside a data iterator. * @param before true if the trigger is before the event, false if after the event. * @param newRow The new row for INSERT and UPDATE. * @param oldRow The previous row for UPDATE and DELETE * @param extraContext Optional additional bindings to set in the script's context when evaluating. * @param existingRecord Optional existing record for the row, used for merge operation to differentiate new vs existing row + * @param manageColumns Whether to manage columns for the row. * @throws ValidationException if the trigger function returns false or the errors map isn't empty. */ - void fireRowTrigger(Container c, User user, TriggerType type, boolean before, int rowNumber, - @Nullable Map newRow, @Nullable Map oldRow, Map extraContext, @Nullable Map existingRecord) - throws ValidationException; + void fireRowTrigger( + Container c, + User user, + TriggerType type, + @Nullable QueryUpdateService.InsertOption insertOption, + boolean before, + int rowNumber, + @Nullable Map newRow, + @Nullable Map oldRow, + Map extraContext, + @Nullable Map existingRecord, + boolean manageColumns + ) throws ValidationException; /** * Return true if there are trigger scripts associated with this table. @@ -619,7 +635,18 @@ void fireRowTrigger(Container c, User user, TriggerType type, boolean before, in /** * Return true if all trigger scripts support streaming. 
*/ - default boolean canStreamTriggers(Container c) { return false; } + default boolean canStreamTriggers(Container c) + { + return false; + } + + /** + * Returns the full set of columns managed by triggers for this TableInfo. + */ + default @Nullable Set getTriggerManagedColumns(@Nullable Container c, QueryUpdateService.InsertOption insertOption) + { + return null; + } /** * Reset the trigger script context by reloading them. Note there could still be caches that need to be reset @@ -632,9 +659,12 @@ void fireRowTrigger(Container c, User user, TriggerType type, boolean before, in /** * Returns true if the underlying database table has triggers. */ - default boolean hasDbTriggers() { return false; } + default boolean hasDbTriggers() + { + return false; + } - /* for asserting that tableinfo is not changed unexpectedly */ + /* for asserting that the TableInfo is not changed unexpectedly */ void setLocked(boolean b); boolean isLocked(); diff --git a/api/src/org/labkey/api/data/triggers/ScriptTrigger.java b/api/src/org/labkey/api/data/triggers/ScriptTrigger.java index 3b83669bb7f..57fd62b80f7 100644 --- a/api/src/org/labkey/api/data/triggers/ScriptTrigger.java +++ b/api/src/org/labkey/api/data/triggers/ScriptTrigger.java @@ -17,12 +17,14 @@ import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; +import org.labkey.api.collections.Sets; import org.labkey.api.data.Container; import org.labkey.api.data.ContainerManager; import org.labkey.api.data.DbScope; import org.labkey.api.data.PHI; import org.labkey.api.data.TableInfo; import org.labkey.api.query.BatchValidationException; +import org.labkey.api.query.QueryUpdateService; import org.labkey.api.query.ValidationException; import org.labkey.api.script.ScriptReference; import org.labkey.api.security.User; @@ -44,25 +46,26 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import 
java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.function.Supplier; /** * Implements a trigger for table operations backed by JavaScript code. - * User: kevink - * Date: 12/21/15 */ public class ScriptTrigger implements Trigger { public static final String SERVER_CONTEXT_KEY = "~~ServerContext~~"; - public static final String SERVER_CONTEXT_SCRIPTNAME = "serverContext"; + public static final String SERVER_CONTEXT_SCRIPT_NAME = "serverContext"; @NotNull protected final Container _container; @NotNull protected final TableInfo _table; @NotNull protected final ScriptReference _script; + @Nullable protected volatile ManagedColumns _managedColumns = null; protected ScriptTrigger(@NotNull Container c, @NotNull TableInfo table, @NotNull ScriptReference script) { @@ -112,6 +115,38 @@ public boolean canStream() return false; } + @Override + public @Nullable ManagedColumns getManagedColumns() + { + if (_managedColumns == null) + _managedColumns = resolveManagedColumns(); + + return _managedColumns; + } + + private @NotNull ManagedColumns resolveManagedColumns() + { + var user = _table.getUserSchema() != null ? 
_table.getUserSchema().getUser() : null; + var result = _invokeTableScript(_container, user, Object.class, "managedColumns", null, () -> null); + if (result instanceof Map map) + { + var insert = managedColumnsFromScriptMap(map, "insert"); + var update = managedColumnsFromScriptMap(map, "update"); + var ignored = managedColumnsFromScriptMap(map, "ignored"); + + return new ManagedColumns(insert, update, ignored); + } + + return ManagedColumns.empty(); + } + + private @NotNull Set managedColumnsFromScriptMap(@NotNull Map map, String key) + { + if (map.get(key) instanceof List columns) + return Sets.newCaseInsensitiveHashSet((List) columns); + return Collections.emptySet(); + } + /** * To avoid leaking PHI through log files, avoid including the full row info in the error detail when any of the * columns in the target table is considered PHI @@ -137,25 +172,17 @@ public void complete(TableInfo table, Container c, User user, TableInfo.TriggerT invokeTableScript(table, c, user, "complete", errors, extraContext, () -> null, event.name().toLowerCase()); } - @Override public void beforeInsert(TableInfo table, Container c, - User user, @Nullable Map newRow, + User user, @Nullable QueryUpdateService.InsertOption insertOption, @Nullable Map newRow, ValidationException errors, Map extraContext) { - invokeTableScript(table, - c, - user, - "beforeInsert", - errors, - extraContext, - filterErrorDetailByPhi(table, () -> "New row data: " + newRow), - newRow); + invokeTableScript(table, c, user, "beforeInsert", errors, extraContext, filterErrorDetailByPhi(table, () -> "New row data: " + newRow), newRow); } @Override public void beforeUpdate(TableInfo table, Container c, - User user, @Nullable Map newRow, @Nullable Map oldRow, + User user, @Nullable QueryUpdateService.InsertOption insertOption, @Nullable Map newRow, @Nullable Map oldRow, ValidationException errors, Map extraContext) { invokeTableScript(table, c, user, "beforeUpdate", errors, extraContext, filterErrorDetailByPhi(table, 
() -> "New row: " + newRow + ". Old row: " + oldRow), newRow, oldRow); @@ -193,7 +220,6 @@ public void afterDelete(TableInfo table, Container c, invokeTableScript(table, c, user, "afterDelete", errors, extraContext, filterErrorDetailByPhi(table, () -> "Old row: " + oldRow), oldRow); } - protected void invokeTableScript(TableInfo table, Container c, User user, String methodName, BatchValidationException errors, Map extraContext, Supplier errorDetail, Object... args) { Object[] allArgs = Arrays.copyOf(args, args.length+1); @@ -205,7 +231,6 @@ protected void invokeTableScript(TableInfo table, Container c, User user, String errors.addRowError(new ValidationException("script error: " + methodName + " trigger closed the connection, possibly due to constraint violation")); } - protected void invokeTableScript(TableInfo table, Container c, User user, String methodName, ValidationException errors, Map extraContext, Supplier errorDetail, Object... args) { Object[] allArgs = Arrays.copyOf(args, args.length+1); @@ -217,7 +242,6 @@ protected void invokeTableScript(TableInfo table, Container c, User user, String errors.addGlobalError("script error: " + methodName + " trigger closed the connection, possibly due to constraint violation"); } - private boolean _hasFn(Container c, User user, String methodName) { return _try(c, user, null, (script) -> _script.hasFn(methodName)); @@ -294,7 +318,7 @@ public static class ServerContextModuleScript extends ModuleScript public ServerContextModuleScript(Script serverContext) throws URISyntaxException { - super(serverContext, new URI(BASE_URI + SERVER_CONTEXT_SCRIPTNAME + ".js"), new URI(BASE_URI)); + super(serverContext, new URI(BASE_URI + SERVER_CONTEXT_SCRIPT_NAME + ".js"), new URI(BASE_URI)); } public static ServerContextModuleScript create(Script serverContext) @@ -316,7 +340,7 @@ public static Script getServerContext(Container c, User u) Context ctx = Context.enter(); try { - return ctx.compileString("module.exports = " + jsCode, 
"serverContext.js", 1, null); + return ctx.compileString("module.exports = " + jsCode, SERVER_CONTEXT_SCRIPT_NAME + ".js", 1, null); } finally { @@ -329,7 +353,6 @@ interface ScriptFn R apply(ScriptReference scriptReference) throws NoSuchMethodException, ScriptException; } - private boolean isConnectionClosed(DbScope scope) { DbScope.Transaction tx = scope.getCurrentTransaction(); @@ -359,7 +382,6 @@ public boolean equals(Object o) @Override public int hashCode() { - return Objects.hash(_container, _table, _script); } } diff --git a/api/src/org/labkey/api/data/triggers/Trigger.java b/api/src/org/labkey/api/data/triggers/Trigger.java index 238d1332b23..33378ff7c2c 100644 --- a/api/src/org/labkey/api/data/triggers/Trigger.java +++ b/api/src/org/labkey/api/data/triggers/Trigger.java @@ -15,20 +15,25 @@ */ package org.labkey.api.data.triggers; +import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.json.JSONObject; +import org.labkey.api.collections.Sets; import org.labkey.api.data.Container; import org.labkey.api.data.TableInfo; import org.labkey.api.query.BatchValidationException; +import org.labkey.api.query.QueryUpdateService; import org.labkey.api.query.ValidationException; import org.labkey.api.security.User; import org.labkey.api.util.UnexpectedException; import java.lang.reflect.Method; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.stream.Collectors; /** @@ -38,19 +43,31 @@ public interface Trigger { /** The trigger name. */ - default String getName() { return getClass().getSimpleName(); } + default String getName() + { + return getClass().getSimpleName(); + } /** Short description of the trigger. */ - default String getDescription() { return null; } + default String getDescription() + { + return null; + } - /** Name of module that defines this trigger. 
*/ - default String getModuleName() { return null; } + /** Name of the module that defines this trigger. */ + default String getModuleName() + { + return null; + } /** * For script triggers, this is the path to the trigger script. * For java triggers, this is the class name. */ - default String getSource() { return getClass().getName(); } + default String getSource() + { + return getClass().getName(); + } /** * The set of events that this trigger implements. @@ -77,10 +94,90 @@ default List getEvents() } } + record ManagedColumns(@NotNull Set insert, @NotNull Set update, @Nullable Set ignored) + { + public static ManagedColumns all(@NotNull Set all) + { + return new ManagedColumns(all, all, null); + } + + public static ManagedColumns all(@NotNull String... all) + { + return all(Sets.newCaseInsensitiveHashSet(all)); + } + + public static ManagedColumns empty() + { + return new ManagedColumns(Collections.emptySet(), Collections.emptySet(), null); + } + + public @Nullable Set getColumns(TableInfo.TriggerType type) + { + if (type == TableInfo.TriggerType.INSERT) + return insert; + if (type == TableInfo.TriggerType.UPDATE) + return update; + return null; + } + } + /** - * True if this TriggerScript can be used in a streaming context; triggers will be called without old row values. + * Returns the set of column names this trigger will read or write during row processing. + *

+ * For each row where a declared column is absent from the input, the trigger must + * explicitly set each such column to null or a real value; failure to do so produces + * a validation error naming this trigger and the unhandled column. + *

+ * Columns that do not exist in the target table's schema (virtual/passthrough columns) may be + * declared here and will work correctly — the database writer ignores them. */ - default boolean canStream() { return false; } + default @Nullable ManagedColumns getManagedColumns() + { + return null; + } + + default void setInsertManagedColumns(Map newRow, @Nullable Map existingRecord, @Nullable QueryUpdateService.InsertOption insertOption) + { + // If this is a merge operation and the existingRecord is not supplied, + // then throw an error to avoid overwriting managed values to null. + if (insertOption != null && insertOption.mergeRows && (existingRecord == null || existingRecord.isEmpty())) + throw new IllegalArgumentException("existingRecord must be non-null for MERGE triggers"); + + setManagedColumns(newRow, null, TableInfo.TriggerType.INSERT); + } + + default void setUpdateManagedColumns(Map newRow, @NotNull Map oldRow) + { + if (oldRow == null) + throw new IllegalArgumentException("oldRow must be non-null for UPDATE triggers"); + + setManagedColumns(newRow, oldRow, TableInfo.TriggerType.UPDATE); + } + + private void setManagedColumns(Map newRow, Map oldRow, TableInfo.TriggerType type) + { + if (newRow == null) + return; + + var managedCols = getManagedColumns(); + if (managedCols == null) + return; + + var cols = managedCols.getColumns(type); + if (cols == null) + return; + + for (var col : cols) + newRow.putIfAbsent(col, oldRow == null ? null : oldRow.get(col)); + } + + /** + * Returns true if this TriggerScript can be used in a streaming context; triggers will be called without old row values. 
+ */ + default boolean canStream() + { + return false; + } default void batchTrigger(TableInfo table, Container c, User user, TableInfo.TriggerType event, boolean before, BatchValidationException errors, Map extraContext) { @@ -98,7 +195,8 @@ default void complete(TableInfo table, Container c, User user, TableInfo.Trigger { } - default void rowTrigger(TableInfo table, Container c, User user, TableInfo.TriggerType event, boolean before, int rowNumber, + default void rowTrigger(TableInfo table, Container c, User user, TableInfo.TriggerType event, + @Nullable QueryUpdateService.InsertOption insertOption, boolean before, int rowNumber, @Nullable Map newRow, @Nullable Map oldRow, ValidationException errors, Map extraContext, @Nullable Map existingRecord) throws ValidationException @@ -108,10 +206,10 @@ default void rowTrigger(TableInfo table, Container c, User user, TableInfo.Trigg switch (event) { case INSERT: - beforeInsert(table, c, user, newRow, errors, extraContext, existingRecord); + beforeInsert(table, c, user, insertOption, newRow, errors, extraContext, existingRecord); break; case UPDATE: - beforeUpdate(table, c, user, newRow, oldRow, errors, extraContext); + beforeUpdate(table, c, user, insertOption, newRow, oldRow, errors, extraContext); break; case DELETE: beforeDelete(table, c, user, oldRow, errors, extraContext); @@ -136,20 +234,20 @@ default void rowTrigger(TableInfo table, Container c, User user, TableInfo.Trigg } default void beforeInsert(TableInfo table, Container c, - User user, @Nullable Map newRow, + User user, @Nullable QueryUpdateService.InsertOption insertOption, @Nullable Map newRow, ValidationException errors, Map extraContext) throws ValidationException { } default void beforeInsert(TableInfo table, Container c, - User user, @Nullable Map newRow, + User user, @Nullable QueryUpdateService.InsertOption insertOption, @Nullable Map newRow, ValidationException errors, Map extraContext, @Nullable Map existingRecord) throws ValidationException { - 
beforeInsert(table, c, user, newRow, errors, extraContext); + beforeInsert(table, c, user, insertOption, newRow, errors, extraContext); } default void beforeUpdate(TableInfo table, Container c, - User user, @Nullable Map newRow, @Nullable Map oldRow, + User user, @Nullable QueryUpdateService.InsertOption insertOption, @Nullable Map newRow, @Nullable Map oldRow, ValidationException errors, Map extraContext) throws ValidationException { } diff --git a/api/src/org/labkey/api/dataiterator/TriggerDataBuilderHelper.java b/api/src/org/labkey/api/dataiterator/TriggerDataBuilderHelper.java index 7712726b939..3386cc4ae67 100644 --- a/api/src/org/labkey/api/dataiterator/TriggerDataBuilderHelper.java +++ b/api/src/org/labkey/api/dataiterator/TriggerDataBuilderHelper.java @@ -21,13 +21,16 @@ import org.labkey.api.data.triggers.Trigger; import org.labkey.api.exp.query.ExpTable; import org.labkey.api.query.BatchValidationException; +import org.labkey.api.query.QueryService; import org.labkey.api.query.QueryUpdateService; import org.labkey.api.query.ValidationException; import org.labkey.api.security.User; import java.util.Collection; import java.util.Map; +import java.util.Objects; import java.util.Set; +import java.util.function.Supplier; import static org.labkey.api.admin.FolderImportContext.IS_NEW_FOLDER_IMPORT_KEY; import static org.labkey.api.util.IntegerUtils.asInteger; @@ -56,7 +59,6 @@ public DataIteratorBuilder before(DataIteratorBuilder in) return new Before(in); } - public DataIteratorBuilder after(DataIteratorBuilder in) { return new After(in); @@ -114,21 +116,46 @@ protected Map getOldRow() class Before implements DataIteratorBuilder { - final DataIteratorBuilder _pre; + final DataIteratorBuilder _in; Before(DataIteratorBuilder in) { - _pre = in; + _in = in; } @Override public DataIterator getDataIterator(DataIteratorContext context) { - DataIterator di = _pre.getDataIterator(context); + DataIterator di = _in.getDataIterator(context); + if (di == null) + return null; 
// can happen if context has errors + if (!_target.hasTriggers(_c)) return di; di = LoggingDataIterator.wrap(di); + // Incorporate columns managed by triggers that may not overlap with the requested column set + var triggerColumns = _target.getTriggerManagedColumns(_c, context.getInsertOption()); + if (triggerColumns != null && !triggerColumns.isEmpty()) + { + var columns = triggerColumns.stream().map(_target::getColumn).filter(Objects::nonNull).toList(); + if (!columns.isEmpty()) + { + var translator = new SimpleTranslator(di, context); + translator.setDebugName("TriggerDataBuilderHelper.Before.translator"); + translator.selectAll(); + + var columnNameMap = translator.getColumnNameMap(); + + for (var column : columns) + { + if (!columnNameMap.containsKey(column.getName())) + translator.addColumn(column, (Supplier) () -> null); + } + di = translator.getDataIterator(context); + } + } + Set existingRecordKeyColumnNames = null; Set sharedKeys = null; boolean isMergeOrUpdate = context.getInsertOption().allowUpdate; @@ -162,15 +189,16 @@ public DataIterator getDataIterator(DataIteratorContext context) } } - class BeforeIterator extends TriggerDataIterator { boolean _firstRow = true; - Map _currentRow = null; + Map _currentRow = null; + private final boolean _manageColumns; BeforeIterator(DataIterator di, DataIteratorContext context) { super(di, context); + _manageColumns = QueryService.get().isTriggerManagedColumnsEnabled(); } @Override @@ -180,7 +208,6 @@ public boolean isScrollable() return false; } - @Override public boolean next() throws BatchValidationException { @@ -188,7 +215,7 @@ public boolean next() throws BatchValidationException TableInfo.TriggerType triggerType = getTriggerType(); if (_firstRow) { - _target.fireBatchTrigger(_c, _user, triggerType, true, getErrors(), _extraContext); + _target.fireBatchTrigger(_c, _user, triggerType, _context.getInsertOption(), true, getErrors(), _extraContext); firedInit = true; _firstRow = false; } @@ -199,7 +226,7 @@ 
public boolean next() throws BatchValidationException _currentRow = getInput().getMap(); try { - _target.fireRowTrigger(_c, _user, triggerType, true, rowNumber, _currentRow, getOldRow(), _extraContext, getExistingRecord()); + _target.fireRowTrigger(_c, _user, triggerType, _context.getInsertOption(), true, rowNumber, _currentRow, getOldRow(), _extraContext, getExistingRecord(), _manageColumns); return true; } catch (ValidationException vex) @@ -212,7 +239,6 @@ public boolean next() throws BatchValidationException return false; } - @Override public Object get(int i) { @@ -224,27 +250,29 @@ public Object get(int i) } } - class After implements DataIteratorBuilder { - final DataIteratorBuilder _post; + final DataIteratorBuilder _in; After(DataIteratorBuilder in) { - _post = in; + _in = in; } @Override public DataIterator getDataIterator(DataIteratorContext context) { - DataIterator it = _post.getDataIterator(context); + DataIterator di = _in.getDataIterator(context); + if (di == null) + return null; // can happen if context has errors + if (!_target.hasTriggers(_c)) - return it; - return new AfterIterator(LoggingDataIterator.wrap(it), context); + return di; + + return new AfterIterator(LoggingDataIterator.wrap(di), context); } } - class AfterIterator extends TriggerDataIterator { AfterIterator(DataIterator di, DataIteratorContext context) @@ -265,7 +293,7 @@ public boolean next() throws BatchValidationException Map newRow = getInput().getMap(); try { - _target.fireRowTrigger(_c, _user, getTriggerType(), false, rowNumber, newRow, getOldRow(), _extraContext, getExistingRecord()); + _target.fireRowTrigger(_c, _user, getTriggerType(), _context.getInsertOption(), false, rowNumber, newRow, getOldRow(), _extraContext, getExistingRecord(), false); } catch (ValidationException vex) { @@ -277,7 +305,7 @@ public boolean next() throws BatchValidationException finally { if (!hasNext && firedInit && !getErrors().hasErrors()) - _target.fireBatchTrigger(_c, _user, getTriggerType(), 
false, getErrors(), _extraContext); + _target.fireBatchTrigger(_c, _user, getTriggerType(), _context.getInsertOption(), false, getErrors(), _extraContext); } } } diff --git a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java index 7a768bcb915..c3739fcdbfd 100644 --- a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java +++ b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java @@ -77,7 +77,6 @@ import org.labkey.api.files.FileContentService; import org.labkey.api.gwt.client.AuditBehaviorType; import org.labkey.api.ontology.OntologyService; -import org.labkey.api.ontology.Quantity; import org.labkey.api.pipeline.PipeRoot; import org.labkey.api.pipeline.PipelineService; import org.labkey.api.reader.TabLoader; @@ -605,7 +604,7 @@ protected List> _insertRowsUsingInsertRow(User user, Contain errors.setExtraContext(extraScriptContext); if (hasTableScript) - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, true, errors, extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, null, true, errors, extraScriptContext); List> result = new ArrayList<>(rows.size()); List> providedValues = new ArrayList<>(rows.size()); @@ -659,7 +658,7 @@ else if (SqlDialect.isTransactionException(sqlx) && errors.hasErrors()) } if (hasTableScript) - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, false, errors, extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, null, false, errors, extraScriptContext); addAuditEvent(user, container, QueryService.AuditAction.INSERT, null, result, null, providedValues); @@ -837,7 +836,7 @@ public List> updateRows(User user, Container container, List assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, 
TableInfo.TriggerType.UPDATE, true, errors, extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, null, true, errors, extraScriptContext); List> result = new ArrayList<>(rows.size()); List> oldRows = new ArrayList<>(rows.size()); @@ -889,7 +888,7 @@ public List> updateRows(User user, Container container, List } // Fire triggers, if any, and also throw if there are any errors - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, false, errors, extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, null, false, errors, extraScriptContext); afterInsertUpdate(null==result?0:result.size(), errors, true); if (errors.hasErrors()) @@ -961,7 +960,7 @@ public List> deleteRows(User user, Container container, List BatchValidationException errors = new BatchValidationException(); errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, true, errors, extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, null, true, errors, extraScriptContext); // TODO: Support update/delete without selecting the existing row -- unfortunately, we currently get the existing row to check its container matches the incoming container boolean streaming = false; //_queryTable.canStreamTriggers(container) && _queryTable.getAuditBehavior() != AuditBehaviorType.NONE; @@ -1006,7 +1005,7 @@ public List> deleteRows(User user, Container container, List } // Fire triggers, if any, and also throw if there are any errors - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, false, errors, extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, null, false, errors, extraScriptContext); addAuditEvent(user, container, QueryService.AuditAction.DELETE, configParameters, result, null, null); @@ -1028,11 
+1027,11 @@ public int truncateRows(User user, Container container, @Nullable Map newRow, @Nullable Map oldRow, ValidationException errors, diff --git a/assay/src/org/labkey/assay/plate/data/WellTriggerFactory.java b/assay/src/org/labkey/assay/plate/data/WellTriggerFactory.java index 8a67ffdf7d7..3386ab0d896 100644 --- a/assay/src/org/labkey/assay/plate/data/WellTriggerFactory.java +++ b/assay/src/org/labkey/assay/plate/data/WellTriggerFactory.java @@ -17,6 +17,7 @@ import org.labkey.api.data.triggers.TriggerFactory; import org.labkey.api.query.BatchValidationException; import org.labkey.api.query.QueryService; +import org.labkey.api.query.QueryUpdateService; import org.labkey.api.query.SimpleValidationError; import org.labkey.api.query.UserSchema; import org.labkey.api.query.ValidationException; @@ -53,13 +54,13 @@ private class ValidateRunImportedPlateTrigger implements Trigger @Override public void beforeUpdate( - TableInfo table, - Container c, - User user, - @Nullable Map newRow, - @Nullable Map oldRow, - ValidationException errors, - Map extraContext + TableInfo table, + Container c, + User user, + @Nullable QueryUpdateService.InsertOption insertOption, @Nullable Map newRow, + @Nullable Map oldRow, + ValidationException errors, + Map extraContext ) throws ValidationException { if (oldRow == null || errors.hasErrors() || !oldRow.containsKey(WellTable.Column.PlateId.name())) @@ -88,6 +89,12 @@ private class EnsureSampleWellTypeTrigger implements Trigger { private final Map wellTypeMap = new LRUMap<>(PlateSet.MAX_PLATE_SET_WELLS); + @Override + public @Nullable ManagedColumns getManagedColumns() + { + return ManagedColumns.all(WellTable.Column.Type.name()); + } + private void addTypeSample( Container c, User user, @@ -142,12 +149,15 @@ public void beforeInsert( TableInfo table, Container c, User user, + @Nullable QueryUpdateService.InsertOption insertOption, @Nullable Map newRow, ValidationException errors, - Map extraContext + Map extraContext, + @Nullable Map 
existingRecord ) { addTypeSample(c, user, newRow, null, extraContext); + setInsertManagedColumns(newRow, existingRecord, insertOption); } @Override @@ -155,6 +165,7 @@ public void beforeUpdate( TableInfo table, Container c, User user, + @Nullable QueryUpdateService.InsertOption insertOption, @Nullable Map newRow, @Nullable Map oldRow, ValidationException errors, @@ -162,6 +173,7 @@ public void beforeUpdate( ) { addTypeSample(c, user, newRow, oldRow, extraContext); + setUpdateManagedColumns(newRow, oldRow); } } diff --git a/core/src/org/labkey/core/query/UsersTable.java b/core/src/org/labkey/core/query/UsersTable.java index 81843f9af0e..b84e5a4e63a 100644 --- a/core/src/org/labkey/core/query/UsersTable.java +++ b/core/src/org/labkey/core/query/UsersTable.java @@ -453,9 +453,9 @@ public SQLFragment toSQLFragment(Map columnMap, } @Override - public void fireRowTrigger(Container c, User user, TriggerType type, boolean before, int rowNumber, @Nullable Map newRow, @Nullable Map oldRow, Map extraContext, @Nullable Map existingRecord) throws ValidationException + public void fireRowTrigger(Container c, User user, TriggerType type, @Nullable QueryUpdateService.InsertOption insertOption, boolean before, int rowNumber, @Nullable Map newRow, @Nullable Map oldRow, Map extraContext, @Nullable Map existingRecord, boolean manageColumns) throws ValidationException { - super.fireRowTrigger(c, user, type, before, rowNumber, newRow, oldRow, extraContext, existingRecord); + super.fireRowTrigger(c, user, type, insertOption, before, rowNumber, newRow, oldRow, extraContext, existingRecord, manageColumns); Integer userId = null!=oldRow ? asInteger(oldRow.get("UserId")) : null!=newRow ? 
asInteger(newRow.get("UserId")) : null; if (null != userId && !before) UserManager.fireUserPropertiesChanged(userId); diff --git a/core/src/org/labkey/core/script/RhinoService.java b/core/src/org/labkey/core/script/RhinoService.java index 2def34d20ba..c317b9438fe 100644 --- a/core/src/org/labkey/core/script/RhinoService.java +++ b/core/src/org/labkey/core/script/RhinoService.java @@ -807,7 +807,7 @@ protected Scriptable getRuntimeScope(ScriptContext ctxt) } // Other JS scripts can call require('serverContext') to load this. - extraModules = Map.of(ScriptTrigger.SERVER_CONTEXT_SCRIPTNAME, scriptContextScript); + extraModules = Map.of(ScriptTrigger.SERVER_CONTEXT_SCRIPT_NAME, scriptContextScript); } Require require = new Require(cx, getTopLevel(), new WrappingModuleScriptProvider(_moduleScriptProvider, extraModules), null, null, true); diff --git a/experiment/src/org/labkey/experiment/api/ExperimentServiceImpl.java b/experiment/src/org/labkey/experiment/api/ExperimentServiceImpl.java index 401357afcc1..376e4a5353a 100644 --- a/experiment/src/org/labkey/experiment/api/ExperimentServiceImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExperimentServiceImpl.java @@ -9694,7 +9694,7 @@ public Map moveDataClassObjects(Collection d // Since those tables already wire up trigger scripts, we'll use that mechanism here as well for the move event. 
BatchValidationException errors = new BatchValidationException(); Map extraContext = Map.of("targetContainer", targetContainer, "classObjects", classObjects, "dataIds", dataIds); - dataClassTable.fireBatchTrigger(sourceContainer, user, TableInfo.TriggerType.MOVE, false, errors, extraContext); + dataClassTable.fireBatchTrigger(sourceContainer, user, TableInfo.TriggerType.MOVE, null, false, errors, extraContext); if (errors.hasErrors()) throw errors; diff --git a/list/src/org/labkey/list/model/ListQueryUpdateService.java b/list/src/org/labkey/list/model/ListQueryUpdateService.java index b4747339634..a05cda0b6d1 100644 --- a/list/src/org/labkey/list/model/ListQueryUpdateService.java +++ b/list/src/org/labkey/list/model/ListQueryUpdateService.java @@ -551,7 +551,7 @@ public Map moveRows( // Before trigger per batch Map extraContext = Map.of("targetContainer", targetContainer, "keys", rowPks); - listTable.fireBatchTrigger(sourceContainer, user, TableInfo.TriggerType.MOVE, true, errors, extraContext); + listTable.fireBatchTrigger(sourceContainer, user, TableInfo.TriggerType.MOVE, null, true, errors, extraContext); if (errors.hasErrors()) throw errors; @@ -574,7 +574,7 @@ public Map moveRows( listAuditEventsCreatedCount += addDetailedMoveAuditEvents(user, sourceContainer, targetContainer, batch); // After trigger per batch - listTable.fireBatchTrigger(sourceContainer, user, TableInfo.TriggerType.MOVE, false, errors, extraContext); + listTable.fireBatchTrigger(sourceContainer, user, TableInfo.TriggerType.MOVE, null, false, errors, extraContext); if (errors.hasErrors()) throw errors; } diff --git a/query/src/org/labkey/query/QueryModule.java b/query/src/org/labkey/query/QueryModule.java index 888f6ebf63c..99ec32e9506 100644 --- a/query/src/org/labkey/query/QueryModule.java +++ b/query/src/org/labkey/query/QueryModule.java @@ -247,6 +247,8 @@ public QuerySchema createSchema(DefaultSchema schema, Module module) "Allow for lookup fields in product folders to query across all 
folders within the top-level folder.", false); OptionalFeatureService.get().addExperimentalFeatureFlag(QueryServiceImpl.EXPERIMENTAL_PRODUCT_PROJECT_DATA_LISTING_SCOPED, "Product folders display folder-specific data", "Only list folder-specific data within product folders.", false); + OptionalFeatureService.get().addExperimentalFeatureFlag(QueryService.EXPERIMENTAL_DISABLE_MANAGED_TRIGGER_COLUMNS, "Disable managed columns in query triggers", + "By default LabKey enforces managed columns for triggers and errors when the data does not align. Enabling this feature will result in them only logging warnings.", false); McpService.get().register(new QueryMcp()); } diff --git a/query/src/org/labkey/query/QueryServiceImpl.java b/query/src/org/labkey/query/QueryServiceImpl.java index beca1695766..a79f16f11db 100644 --- a/query/src/org/labkey/query/QueryServiceImpl.java +++ b/query/src/org/labkey/query/QueryServiceImpl.java @@ -33,7 +33,6 @@ import org.json.JSONObject; import org.junit.Assert; import org.junit.Test; -import org.labkey.api.action.ApiUsageException; import org.labkey.api.assay.AssayService; import org.labkey.api.audit.AbstractAuditHandler; import org.labkey.api.audit.AuditHandler; @@ -3577,6 +3576,11 @@ public boolean isProductFoldersDataListingScopedToProject() return AppProps.getInstance().isOptionalFeatureEnabled(EXPERIMENTAL_PRODUCT_PROJECT_DATA_LISTING_SCOPED); } + public boolean isTriggerManagedColumnsEnabled() + { + return !AppProps.getInstance().isOptionalFeatureEnabled(EXPERIMENTAL_DISABLE_MANAGED_TRIGGER_COLUMNS); + } + public static class TestCase extends Assert { @Test diff --git a/specimen/src/org/labkey/specimen/query/SpecimenUpdateService.java b/specimen/src/org/labkey/specimen/query/SpecimenUpdateService.java index e9d88406623..9368188076c 100644 --- a/specimen/src/org/labkey/specimen/query/SpecimenUpdateService.java +++ b/specimen/src/org/labkey/specimen/query/SpecimenUpdateService.java @@ -70,7 +70,6 @@ public SpecimenUpdateService(TableInfo 
queryTable) super(queryTable); } - @Override public int importRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, Map configParameters, @Nullable Map extraScriptContext) { @@ -94,7 +93,7 @@ public List> deleteRows(User user, Container container, List BatchValidationException errors = new BatchValidationException(); errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, true, errors, extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, null, true, errors, extraScriptContext); Set rowIds = new HashSet<>(keys.size()); for (Map key : keys) @@ -139,7 +138,7 @@ public List> deleteRows(User user, Container container, List throw new IllegalStateException(e.getMessage()); } - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, false, errors, extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, null, false, errors, extraScriptContext); addAuditEvent(user, container, QueryService.AuditAction.DELETE, configParameters, null, null, null); @@ -203,7 +202,7 @@ public List> insertRows(User user, Container container, List try { if (hasTableScript) - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, true, errors, extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, null, true, errors, extraScriptContext); } catch (BatchValidationException e) { @@ -263,7 +262,7 @@ public List> insertRows(User user, Container container, List try { if (hasTableScript) - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, false, errors, extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, null, false, errors, extraScriptContext); } catch (BatchValidationException e) { @@ -332,7 +331,7 @@ public List> 
updateRows(User user, Container container, List throw new IllegalArgumentException("rows and oldKeys are required to be the same length, but were " + rows.size() + " and " + oldKeys + " in length, respectively"); errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, true, errors, extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, null, true, errors, extraScriptContext); Set rowIds = new HashSet<>(rows.size()); Map> uniqueRows = new LongHashMap<>(rows.size()); @@ -406,7 +405,7 @@ public List> updateRows(User user, Container container, List throw new IllegalStateException(e.getMessage()); } - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, false, errors, extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, null, false, errors, extraScriptContext); addAuditEvent(user, container, QueryService.AuditAction.UPDATE, configParameters, rows, null, null);