Notes: investigated which Spark SQL DataType corresponds to each type name in the field config.

/**
 * Maps a schema field's configured type name to the corresponding Spark SQL {@link DataType}.
 *
 * @param fieldsConfig config for a single field; must contain {@code FIELD_TYPE_CONFIG}, and for
 *     {@code "decimal"} also {@code DECIMAL_PRECISION_CONFIG} and {@code DECIMAL_SCALE_CONFIG}
 * @return the Spark {@code DataType} matching the configured type name
 * @throws UnsupportedOperationException for complex types ({@code array}, {@code map},
 *     {@code struct}), which are not yet supported
 * @throws IllegalArgumentException if the type name is not recognized
 */
private static DataType parseDataType(Config fieldsConfig) {
  String type = fieldsConfig.getString(FIELD_TYPE_CONFIG);
  switch (type) {
    case "string":
      return DataTypes.StringType;
    case "byte":
      return DataTypes.ByteType;
    case "short":
      return DataTypes.ShortType;
    case "int":
      return DataTypes.IntegerType;
    case "long":
      return DataTypes.LongType;
    case "float":
      return DataTypes.FloatType;
    case "double":
      return DataTypes.DoubleType;
    case "decimal":
      ConfigUtils.assertConfig(fieldsConfig, DECIMAL_SCALE_CONFIG);
      ConfigUtils.assertConfig(fieldsConfig, DECIMAL_PRECISION_CONFIG);
      // Spark's createDecimalType signature is (precision, scale) — precision must come first.
      // The previous code passed scale first, producing a swapped DecimalType.
      return DataTypes.createDecimalType(
              fieldsConfig.getInt(DECIMAL_PRECISION_CONFIG),
              fieldsConfig.getInt(DECIMAL_SCALE_CONFIG));
    case "boolean":
      return DataTypes.BooleanType;
    case "binary":
      return DataTypes.BinaryType;
    case "date":
      return DataTypes.DateType;
    case "timestamp":
      return DataTypes.TimestampType;
    case "array":
    case "map":
    case "struct":
      // Subclass of RuntimeException, so existing catch blocks still work.
      throw new UnsupportedOperationException("Schema check does not currently support complex types");
    default:
      throw new IllegalArgumentException("Unknown type: " + type);
  }
}