Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.function.Predicate;
import org.apache.iceberg.BaseMetastoreOperations.CommitStatus;
import org.apache.iceberg.encryption.EncryptionManager;
import org.apache.iceberg.exceptions.AlreadyExistsException;
import org.apache.iceberg.exceptions.CommitFailedException;
Expand Down Expand Up @@ -294,20 +295,39 @@ public long newSnapshotId() {
* were attempting to set. This is used as a last resort when we are dealing with exceptions that
* may indicate the commit has failed but are not proof that this is the case. Past locations must
* also be searched on the chance that a second committer was able to successfully commit on top
* of our commit. When the {@code newMetadataLocation} is not found, the method returns {@link
* CommitStatus#UNKNOWN}.
*
* @param newMetadataLocation the path of the new commit file
* @param config metadata to use for configuration
* @return Commit Status of Success, Unknown
*/
protected CommitStatus checkCommitStatus(String newMetadataLocation, TableMetadata config) {
  // Delegate to the shared helper, re-loading the table to see whether the current (or a past)
  // metadata location matches the one we tried to commit. A miss yields UNKNOWN, not FAILURE.
  return checkCommitStatus(
      tableName(),
      newMetadataLocation,
      config.properties(),
      () -> checkCurrentMetadataLocation(newMetadataLocation));
}

/**
* Attempt to load the table and see if any current or past metadata location matches the one we
* were attempting to set. This is used as a last resort when we are dealing with exceptions that
* may indicate the commit has failed but are not proof that this is the case. Past locations must
* also be searched on the chance that a second committer was able to successfully commit on top
* of our commit. When the {@code newMetadataLocation} is not found, the method returns {@link
* CommitStatus#FAILURE}.
*
* @param newMetadataLocation the path of the new commit file
* @param config metadata to use for configuration
* @return Commit Status of Success, Failure or Unknown
*/
protected CommitStatus checkCommitStatusStrict(String newMetadataLocation, TableMetadata config) {
  // Strict variant of checkCommitStatus: delegates to the strict shared helper, which reports
  // FAILURE (rather than UNKNOWN) when newMetadataLocation is not found in the loaded table.
  var tableProperties = config.properties();
  return checkCommitStatusStrict(
      tableName(),
      newMetadataLocation,
      tableProperties,
      () -> checkCurrentMetadataLocation(newMetadataLocation));
}

/**
Expand Down
3 changes: 3 additions & 0 deletions ice/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,9 @@ Supported codecs: `zstd`, `snappy`, `gzip`, `lz4`.
# add a column to an existing table
ice alter-table flowers.iris '[{"op":"add_column","name":"extra","type":"string"}]'

# add a NOT NULL column (`required: true`; uses Iceberg allowIncompatibleChanges for that alter)
ice alter-table flowers.iris '[{"op":"add_column","name":"extra","type":"string","required":true}]'

# verify the schema change
ice describe -s flowers.iris
```
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,11 @@
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.UUID;
import javax.annotation.Nullable;
import org.apache.iceberg.Table;
import org.apache.iceberg.Transaction;
Expand All @@ -22,6 +26,7 @@
import org.apache.iceberg.UpdateSchema;
import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.expressions.Literal;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;
import org.slf4j.Logger;
Expand Down Expand Up @@ -53,23 +58,91 @@ public static class AddColumn extends Update {
@Nullable private final String after;
@Nullable private final String before;
private final boolean first;
private final boolean required;

public AddColumn(
    @JsonProperty(value = "name", required = true) String name,
    @JsonProperty(value = "type", required = true) String type,
    @JsonProperty("doc") @Nullable String doc,
    @JsonProperty("after") @Nullable String after,
    @JsonProperty("before") @Nullable String before,
    @JsonProperty("first") @Nullable Boolean first,
    @JsonProperty("required") @Nullable Boolean required) {
  this.name = name;
  // "type" arrives as an Iceberg primitive-type string (e.g. "string", "long").
  this.type = Types.fromPrimitiveString(type);
  this.doc = doc;
  this.after = after;
  this.before = before;
  // Optional JSON booleans may be absent (null); both flags default to false.
  this.first = first != null && first;
  this.required = required != null && required;
}
}

// Coerce a JSON literal (number / boolean / string) into an Iceberg Literal of the requested
// type. For numeric, boolean and string types we build the Literal directly; for complex
// primitives (date/time/timestamp/decimal/uuid/binary) we accept a string form and let
// Iceberg parse it via Literal<CharSequence>.to(type).
@Nullable
private static Literal<?> coerceDefault(Type type, @Nullable Object jsonValue) {
  // Coerce a JSON literal (number / boolean / string) into an Iceberg Literal of the requested
  // primitive type. Returns null only when jsonValue itself is null (no default supplied);
  // any unparseable or unsupported value throws IllegalArgumentException.
  if (jsonValue == null) {
    return null;
  }
  try {
    switch (type.typeId()) {
      case BOOLEAN:
        if (jsonValue instanceof Boolean b) {
          return Literal.of(b);
        }
        return Literal.of(Boolean.parseBoolean(jsonValue.toString()));
      case INTEGER:
        return Literal.of(numberOrParse(jsonValue).intValue());
      case LONG:
        return Literal.of(numberOrParse(jsonValue).longValue());
      case FLOAT:
        return Literal.of(numberOrParse(jsonValue).floatValue());
      case DOUBLE:
        return Literal.of(numberOrParse(jsonValue).doubleValue());
      case STRING:
        return Literal.of(jsonValue.toString());
      case BINARY:
      case FIXED:
        // UTF-8 bytes of the string form. For FIXED the byte length must match the type's
        // declared length; Literal.to(type) validates that and returns null on mismatch.
        Literal<?> bytes =
            Literal.of(ByteBuffer.wrap(jsonValue.toString().getBytes(StandardCharsets.UTF_8)))
                .to(type);
        if (bytes == null) {
          throw new IllegalArgumentException(
              String.format("Cannot parse default value '%s' as %s", jsonValue, type));
        }
        return bytes;
      case DECIMAL:
        // to(type) returns null when the value's scale does not match the decimal type's
        // scale; surface that as an error instead of silently dropping the default.
        Literal<?> dec = Literal.of(new BigDecimal(jsonValue.toString())).to(type);
        if (dec == null) {
          throw new IllegalArgumentException(
              String.format("Cannot parse default value '%s' as %s", jsonValue, type));
        }
        return dec;
      case DATE:
      case TIME:
      case TIMESTAMP:
      case TIMESTAMP_NANO:
        // Let Iceberg parse the canonical string form (e.g. "2025-01-01",
        // "2025-01-01T00:00:00", "2025-01-01T00:00:00+00:00").
        Literal<?> parsed = Literal.of(jsonValue.toString()).to(type);
        if (parsed == null) {
          throw new IllegalArgumentException(
              String.format("Cannot parse default value '%s' as %s", jsonValue, type));
        }
        return parsed;
      default:
        throw new IllegalArgumentException(
            String.format("Defaults not supported for type %s", type));
    }
  } catch (ClassCastException | IllegalArgumentException e) {
    throw new IllegalArgumentException(
        String.format("Cannot coerce default value '%s' to type %s", jsonValue, type), e);
  }
}

// Accept either a JSON number (already a Number) or a string parseable as a number.
// Accept either a JSON number (already a Number) or a string parseable as a number.
// BigDecimal accepts any numeric textual form; callers narrow to int/long/float/double.
private static Number numberOrParse(Object jsonValue) {
  return (jsonValue instanceof Number n) ? n : new BigDecimal(jsonValue.toString());
}

public static class AlterColumn extends Update {
private final String name;
private final Type.PrimitiveType type;
Expand Down Expand Up @@ -148,7 +221,18 @@ public static void run(Catalog catalog, TableIdentifier tableId, List<Update> up
case AddColumn up -> {
// TODO: support nested columns
UpdateSchema us = schemaUpdates.getValue();
us.addColumn(up.name, up.type, up.doc);
if (up.required) {
// Iceberg rejects required adds without an initial default unless incompatible
// changes are explicitly allowed (even for empty tables with a snapshot).
us.allowIncompatibleChanges();
if (up.doc != null) {
us.addRequiredColumn(up.name, up.type, up.doc);
} else {
us.addRequiredColumn(up.name, up.type);
}
} else {
us.addColumn(up.name, up.type, up.doc);
}
if (up.after != null) {
us.moveAfter(up.name, up.after);
} else if (up.before != null) {
Expand Down
Loading
Loading