本文整理了Java中org.apache.kafka.connect.data.Decimal.schema()方法的一些代码示例,展示了Decimal.schema()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Decimal.schema()方法的具体详情如下:
包路径:org.apache.kafka.connect.data.Decimal
类名称:Decimal
方法名:schema
方法说明:根据给定的小数位数(scale)构建并返回 Decimal 逻辑类型的 Schema,常用于定义字段的 schema(如 `Decimal.schema(0)`)或按值的精度构建 schema(如 `Decimal.schema(value.scale())`)。
代码示例来源:origin: hpgrahsl/kafka-connect-mongodb
/**
 * Creates a converter for Kafka Connect's Decimal logical type using the
 * default {@code Format.DECIMAL128} representation.
 *
 * <p>Delegates to the {@code Format}-taking constructor so the shared
 * {@code super(Decimal.schema(0))} call lives in exactly one place instead
 * of being duplicated across constructors.
 */
public DecimalFieldConverter() {
    this(Format.DECIMAL128);
}
代码示例来源:origin: hpgrahsl/kafka-connect-mongodb
/**
 * Creates a converter for Kafka Connect's Decimal logical type
 * (schema built via {@code Decimal.schema(0)}, i.e. scale 0) that
 * renders values using the supplied BSON output format.
 *
 * @param format the target representation for converted decimal values
 */
public DecimalFieldConverter(Format format) {
super(Decimal.schema(0));
this.format = format;
}
代码示例来源:origin: hpgrahsl/kafka-connect-mongodb
/**
 * Creates a converter for Kafka Connect's Decimal logical type
 * (schema built via {@code Decimal.schema(0)}, i.e. scale 0) that
 * renders values using the supplied BSON output format.
 *
 * @param format the target representation for converted decimal values
 */
public DecimalFieldConverter(Format format) {
super(Decimal.schema(0));
this.format = format;
}
代码示例来源:origin: hpgrahsl/kafka-connect-mongodb
/**
 * Creates a converter for Kafka Connect's Decimal logical type using the
 * default {@code Format.DECIMAL128} representation.
 *
 * <p>Delegates to the {@code Format}-taking constructor so the shared
 * {@code super(Decimal.schema(0))} call lives in exactly one place instead
 * of being duplicated across constructors.
 */
public DecimalFieldConverter() {
    this(Format.DECIMAL128);
}
代码示例来源:origin: org.apache.kafka/connect-api
/**
 * Convert the specified value to a {@link Decimal decimal} value with the
 * given scale. Not supplying a schema may limit the ability to convert to
 * the desired type.
 *
 * @param schema the schema for the value; may be null
 * @param value the value to be converted; may be null
 * @param scale the scale used to build the target {@link Decimal} schema
 *              that the value is converted to
 * @return the representation as a decimal, or null if the supplied value was null
 * @throws DataException if the value cannot be converted to a decimal value
 */
public static BigDecimal convertToDecimal(Schema schema, Object value, int scale) {
return (BigDecimal) convertTo(Decimal.schema(scale), schema, value);
}
代码示例来源:origin: org.apache.kafka/connect-api
/**
 * Adds a header whose value is the given {@link BigDecimal}, keyed by
 * {@code key}. A null value is stored as a null header with no schema.
 */
@Override
public Headers addDecimal(String key, BigDecimal value) {
    if (value == null) {
        return add(key, null, null);
    }
    // Build the Decimal schema from the value's own scale, then validate the
    // value against it before storing it without re-validation.
    Schema decimalSchema = Decimal.schema(value.scale());
    Decimal.fromLogical(decimalSchema, value);
    return addWithoutValidating(key, value, decimalSchema);
}
代码示例来源:origin: hpgrahsl/kafka-connect-mongodb
@TestFactory
@DisplayName("tests for logical type decimal field conversions (legacy)")
public List<DynamicTest> testDecimalFieldConverterLegacy() {
    // LEGACYDOUBLE mode renders decimals as BSON doubles.
    SinkFieldConverter converter =
        new DecimalFieldConverter(DecimalFieldConverter.Format.LEGACYDOUBLE);
    List<DynamicTest> tests = new ArrayList<>();

    // One dynamic test per sample: negative, zero and positive decimals.
    for (BigDecimal sample : Arrays.asList(
            new BigDecimal("-1234567890.09876543210"),
            BigDecimal.ZERO,
            new BigDecimal("+1234567890.09876543210"))) {
        tests.add(dynamicTest("conversion with "
                + converter.getClass().getSimpleName() + " for " + sample,
            () -> assertEquals(sample.doubleValue(),
                ((BsonDouble) converter.toBson(sample)).getValue())));
    }

    // Null handling: required schema -> DataException, optional -> BsonNull,
    // optional with default -> the default's double value.
    tests.add(dynamicTest("optional type conversions", () -> {
        Schema optionalWithDefault =
            Decimal.builder(0).optional().defaultValue(BigDecimal.ZERO);
        assertAll("checks",
            () -> assertThrows(DataException.class,
                () -> converter.toBson(null, Decimal.schema(0))),
            () -> assertEquals(new BsonNull(),
                converter.toBson(null, Decimal.builder(0).optional())),
            () -> assertEquals(
                ((BigDecimal) optionalWithDefault.defaultValue()).doubleValue(),
                ((BsonDouble) converter.toBson(null, optionalWithDefault)).getValue()));
    }));
    return tests;
}
代码示例来源:origin: org.apache.kafka/connect-api
return new SchemaAndValue(Schema.FLOAT64_SCHEMA, dValue);
Schema schema = Decimal.schema(decimal.scale());
return new SchemaAndValue(schema, decimal);
} catch (NumberFormatException e) {
代码示例来源:origin: hpgrahsl/kafka-connect-mongodb
@TestFactory
@DisplayName("tests for logical type decimal field conversions (new)")
public List<DynamicTest> testDecimalFieldConverterNew() {
    // Default converter mode renders decimals as BSON Decimal128.
    SinkFieldConverter converter = new DecimalFieldConverter();
    List<DynamicTest> tests = new ArrayList<>();

    // One dynamic test per sample: negative, zero and positive decimals.
    for (BigDecimal sample : Arrays.asList(
            new BigDecimal("-1234567890.09876543210"),
            BigDecimal.ZERO,
            new BigDecimal("+1234567890.09876543210"))) {
        tests.add(dynamicTest("conversion with "
                + converter.getClass().getSimpleName() + " for " + sample,
            () -> assertEquals(sample,
                ((BsonDecimal128) converter.toBson(sample)).getValue().bigDecimalValue())));
    }

    // Null handling: required schema -> DataException, optional -> BsonNull,
    // optional with default -> the default value round-tripped losslessly.
    tests.add(dynamicTest("optional type conversions", () -> {
        Schema optionalWithDefault =
            Decimal.builder(0).optional().defaultValue(BigDecimal.ZERO);
        assertAll("checks",
            () -> assertThrows(DataException.class,
                () -> converter.toBson(null, Decimal.schema(0))),
            () -> assertEquals(new BsonNull(),
                converter.toBson(null, Decimal.builder(0).optional())),
            () -> assertEquals(optionalWithDefault.defaultValue(),
                ((BsonDecimal128) converter.toBson(null, optionalWithDefault))
                    .getValue().bigDecimalValue()));
    }));
    return tests;
}
代码示例来源:origin: com.github.jcustenborder.kafka.connect/connect-utils
/**
 * Registers the default schema-to-parser mappings used to coerce raw
 * values into Connect types.
 *
 * <p>Fix: the original registered {@code Schema.BOOLEAN_SCHEMA} twice on
 * consecutive lines; the redundant second registration (which merely
 * overwrote the first with an identical parser) has been removed.
 */
public Parser() {
    this.typeParsers = new HashMap<>();
    registerTypeParser(Schema.BOOLEAN_SCHEMA, new BooleanParser());
    registerTypeParser(Schema.FLOAT32_SCHEMA, new Float32TypeParser());
    registerTypeParser(Schema.FLOAT64_SCHEMA, new Float64TypeParser());
    registerTypeParser(Schema.INT8_SCHEMA, new Int8TypeParser());
    registerTypeParser(Schema.INT16_SCHEMA, new Int16TypeParser());
    registerTypeParser(Schema.INT32_SCHEMA, new Int32TypeParser());
    registerTypeParser(Schema.INT64_SCHEMA, new Int64TypeParser());
    registerTypeParser(Schema.STRING_SCHEMA, new StringTypeParser());
    // Decimal is a logical type; the scale on the key schema is nominal
    // (scale 1) -- presumably lookup keys on the logical type name. TODO confirm.
    registerTypeParser(Decimal.schema(1), new DecimalTypeParser());
    registerTypeParser(Date.SCHEMA, new DateTypeParser());
    registerTypeParser(Time.SCHEMA, new TimeTypeParser());
    registerTypeParser(Timestamp.SCHEMA, new TimestampTypeParser());
}
代码示例来源:origin: io.confluent.kafka/connect-utils
/**
 * Registers the default schema-to-parser mappings used to coerce raw
 * values into Connect types.
 *
 * <p>Fix: the original registered {@code Schema.BOOLEAN_SCHEMA} twice on
 * consecutive lines; the redundant second registration (which merely
 * overwrote the first with an identical parser) has been removed.
 */
public Parser() {
    this.typeParsers = new HashMap<>();
    registerTypeParser(Schema.BOOLEAN_SCHEMA, new BooleanParser());
    registerTypeParser(Schema.FLOAT32_SCHEMA, new Float32TypeParser());
    registerTypeParser(Schema.FLOAT64_SCHEMA, new Float64TypeParser());
    registerTypeParser(Schema.INT8_SCHEMA, new Int8TypeParser());
    registerTypeParser(Schema.INT16_SCHEMA, new Int16TypeParser());
    registerTypeParser(Schema.INT32_SCHEMA, new Int32TypeParser());
    registerTypeParser(Schema.INT64_SCHEMA, new Int64TypeParser());
    registerTypeParser(Schema.STRING_SCHEMA, new StringTypeParser());
    // Decimal is a logical type; the scale on the key schema is nominal
    // (scale 1) -- presumably lookup keys on the logical type name. TODO confirm.
    registerTypeParser(Decimal.schema(1), new DecimalTypeParser());
    registerTypeParser(Date.SCHEMA, new DateTypeParser());
    registerTypeParser(Time.SCHEMA, new TimeTypeParser());
    registerTypeParser(Timestamp.SCHEMA, new TimestampTypeParser());
}
代码示例来源:origin: wepay/kafka-connect-bigquery
/**
 * Verifies that a Connect struct containing a Decimal field converts to a
 * BigQuery schema with a single REQUIRED FLOAT field of the same name.
 */
@Test
public void testDecimal() {
    final String fieldName = "Decimal";

    // Expected BigQuery side: one required FLOAT column.
    com.google.cloud.bigquery.Field expectedField =
        com.google.cloud.bigquery.Field
            .newBuilder(fieldName, LegacySQLTypeName.FLOAT)
            .setMode(com.google.cloud.bigquery.Field.Mode.REQUIRED)
            .build();
    com.google.cloud.bigquery.Schema bigQueryExpectedSchema =
        com.google.cloud.bigquery.Schema.of(expectedField);

    // Input Kafka Connect side: struct with one Decimal(scale 0) field.
    Schema kafkaConnectTestSchema =
        SchemaBuilder.struct().field(fieldName, Decimal.schema(0)).build();

    assertEquals(bigQueryExpectedSchema,
        new BigQuerySchemaConverter(false).convertSchema(kafkaConnectTestSchema));
}
代码示例来源:origin: hpgrahsl/kafka-connect-mongodb
.field("myTimestamp", Timestamp.SCHEMA)
.field("myTime", Time.SCHEMA)
.field("myDecimal", Decimal.schema(0))
.build();
内容来源于网络,如有侵权,请联系作者删除!