Package org.elasticsearch.index.mapper

Examples of org.elasticsearch.index.mapper.FieldMapper


                .startObject()
                .field("date_field", "2011/01/22 00:00:00 +02")
                .endObject()
                .copiedBytes());

        FieldMapper fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field");
        assertThat(fieldMapper, instanceOf(DateFieldMapper.class));
    }
View Full Code Here
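
The snippet above relies on automatic date detection: the string value matches one of the default dynamic date formats (yyyy/MM/dd HH:mm:ss Z in older versions), so the dynamically created mapper is a DateFieldMapper. A minimal sketch of a root mapping that makes this explicit, assuming the XContentFactory/XContentBuilder API shown elsewhere on this page and a hypothetical type name "type1":

        // hedged sketch: enable date detection and pin the accepted dynamic format
        XContentBuilder mapping = XContentFactory.jsonBuilder()
                .startObject()
                    .startObject("type1")
                        .field("date_detection", true)
                        .field("dynamic_date_formats", new String[]{"yyyy/MM/dd HH:mm:ss Z"})
                    .endObject()
                .endObject();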


                .field("s_long", "100")
                .field("s_double", "100.0")
                .endObject()
                .copiedBytes());

        FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long");
        assertThat(mapper, instanceOf(LongFieldMapper.class));

        mapper = defaultMapper.mappers().smartNameFieldMapper("s_double");
        assertThat(mapper, instanceOf(DoubleFieldMapper.class));
    }
View Full Code Here

                .field("s_long", "100")
                .field("s_double", "100.0")
                .endObject()
                .copiedBytes());

        FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long");
        assertThat(mapper, instanceOf(StringFieldMapper.class));

        mapper = defaultMapper.mappers().smartNameFieldMapper("s_double");
        assertThat(mapper, instanceOf(StringFieldMapper.class));
    }
View Full Code Here
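
The two snippets above differ only in whether numeric detection is enabled on the root object: with numeric_detection set to true, the string values "100" and "100.0" are dynamically mapped as long and double; with the default of false they stay plain string fields, as the second snippet asserts. A hedged sketch of a mapping that turns it on (hypothetical type name):

        XContentBuilder mapping = XContentFactory.jsonBuilder()
                .startObject()
                    .startObject("type1")
                        .field("numeric_detection", true)
                    .endObject()
                .endObject();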

    @Override protected AnalyzeResponse shardOperation(AnalyzeRequest request, int shardId) throws ElasticSearchException {
        IndexService indexService = indicesService.indexServiceSafe(request.index());
        Analyzer analyzer = null;
        String field = null;
        if (request.field() != null) {
            FieldMapper fieldMapper = indexService.mapperService().smartNameFieldMapper(request.field());
            if (fieldMapper != null) {
                analyzer = fieldMapper.indexAnalyzer();
                field = fieldMapper.names().indexName();
            }
        }
        if (field == null) {
            field = "_all";
        }
View Full Code Here
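
After the analyzer and field name are resolved (with a default analyzer substituted when no mapper matched), the analyze action runs the request text through a Lucene token stream. A hedged sketch of that consumption step, assuming a Lucene 4.x Analyzer plus TokenStream/CharTermAttribute, and that request.text() holds the input:

        try (TokenStream stream = analyzer.tokenStream(field, request.text())) {
            CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
            stream.reset();
            while (stream.incrementToken()) {
                // collect term.toString() into the AnalyzeResponse token list
            }
            stream.end();
        }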

        MapperService.SmartNameFieldMappers fieldMappers = mapperService.smartName(fieldType.field(), fieldType.types());

        Analyzer queryAnalyzer = null;
        Analyzer indexAnalyzer = null;
        if (fieldMappers != null) {
            FieldMapper fieldMapper = fieldMappers.mapper();

            queryAnalyzer = fieldMapper.searchAnalyzer();
            if (Strings.hasLength(fieldType.queryAnalyzer())) {
                NamedAnalyzer namedAnalyzer = analysisService.analyzer(fieldType.queryAnalyzer());
                if (namedAnalyzer == null) {
                    throw new ElasticsearchException("Query analyzer[" + fieldType.queryAnalyzer() + "] does not exist.");
                }
                queryAnalyzer = namedAnalyzer.analyzer();
            }

            indexAnalyzer = fieldMapper.indexAnalyzer();
            if (Strings.hasLength(fieldType.indexAnalyzer())) {
                NamedAnalyzer namedAnalyzer = analysisService.analyzer(fieldType.indexAnalyzer());
                if (namedAnalyzer == null) {
                    throw new ElasticsearchException("Index analyzer[" + fieldType.indexAnalyzer() + "] does not exist.");
                }
View Full Code Here
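
The query and index branches above are symmetric, each pairing the mapper's analyzer with an optional per-request override; a hedged sketch of that pattern factored into a helper (hypothetical name, same Strings/NamedAnalyzer/analysisService as above):

        private Analyzer resolveAnalyzer(Analyzer fromMapper, String override) {
            if (!Strings.hasLength(override)) {
                return fromMapper; // no override requested, keep the mapper's analyzer
            }
            NamedAnalyzer named = analysisService.analyzer(override);
            if (named == null) {
                throw new ElasticsearchException("Analyzer [" + override + "] does not exist.");
            }
            return named.analyzer();
        }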

                } else if ("start".equals(currentFieldName)) {
                    start = parser.intValue();
                }
            }
        }
        FieldMapper keyFieldMapper = context.mapperService().smartNameFieldMapper(
                keyFieldName);
        if ((keyFieldMapper != null)
                && !(keyFieldMapper.fieldDataType().getType().equals(LatestFacetExecutor.keyDataType.getType()))) {
            throw new FacetPhaseExecutionException(facetName,
                    "key field must be of type long but is "
                            + keyFieldMapper.fieldDataType().getType());
        }

        FieldMapper tsFieldMapper = context.mapperService().smartNameFieldMapper(tsFieldName);
        if ((tsFieldMapper != null)
                && !(tsFieldMapper.fieldDataType().getType().equals(LatestFacetExecutor.tsDataType.getType()))) {
            throw new FacetPhaseExecutionException(facetName,
                    "ts field must be of type long but is "
                            + tsFieldMapper.fieldDataType().getType());
        }

        FieldMapper valueFieldMapper = context.mapperService().smartNameFieldMapper(valueFieldName);
        // unlike the key and ts lookups above, the value mapper is dereferenced directly, so guard against null
        if (valueFieldMapper != null
                && (valueFieldMapper.fieldDataType().getType().equals("int") || valueFieldMapper.fieldDataType().getType().equals("long"))) {
            IndexNumericFieldData valueFieldData = context.fieldData().getForField(valueFieldMapper);
            IndexNumericFieldData keyFieldData = context.fieldData().getForField(keyFieldMapper);
            IndexNumericFieldData tsFieldData = context.fieldData().getForField(tsFieldMapper);

            return new LatestFacetExecutor(keyFieldData, valueFieldData, tsFieldData, size, start, context.cacheRecycler());
View Full Code Here
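
The key, ts, and value validations above repeat a single pattern: resolve the mapper, then check its field data type. A hedged sketch consolidating it into one helper (hypothetical name):

        private static void requireFieldDataType(String facetName, String role, FieldMapper mapper, String expected) {
            if (mapper != null && !mapper.fieldDataType().getType().equals(expected)) {
                throw new FacetPhaseExecutionException(facetName,
                        role + " field must be of type " + expected + " but is " + mapper.fieldDataType().getType());
            }
        }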

        // validation; as opposed to the DateHistogramFacetParser, the distinct field and the interval are also required
        if (keyField == null) {
            throw new FacetPhaseExecutionException(facetName, "key field is required to be set for distinct histogram facet, either using [field] or using [key_field]");
        }
        FieldMapper keyMapper = context.smartNameFieldMapper(keyField);
        if (keyMapper == null) {
            throw new FacetPhaseExecutionException(facetName, "(key) field [" + keyField + "] not found");
        } else if (!keyMapper.fieldDataType().getType().equals("long")) {
            throw new FacetPhaseExecutionException(facetName, "(key) field [" + keyField + "] is not of type date");
        }
        if (distinctField == null) {
            throw new FacetPhaseExecutionException(facetName, "distinct field is required to be set for distinct histogram facet, either using [value_field] or using [distinctField]");
        }
        FieldMapper distinctFieldMapper = context.smartNameFieldMapper(distinctField);
        if (distinctFieldMapper == null) {
            throw new FacetPhaseExecutionException(facetName, "no mapping found for " + distinctField);
        }
        if (!intervalSet) {
            throw new FacetPhaseExecutionException(facetName, "[interval] is required to be set for distinct histogram facet");
        }


        // this is specific to the "Distinct" DateHistogram. Use a MutableDateTime to take care of the interval and rounding.
        // we set the rounding after we set the zone, for it to take effect
        if (sInterval != null) {
            int index = sInterval.indexOf(':');
            if (index != -1) {
                // set with rounding
                DateFieldParser fieldParser = dateFieldParsers.get(sInterval.substring(0, index));
                if (fieldParser == null) {
                    throw new FacetPhaseExecutionException(facetName, "failed to parse interval [" + sInterval + "] with custom rounding using built in intervals (year/month/...)");
                }
                DateTimeField field = fieldParser.parse(dateTime.getChronology());
                int rounding = this.rounding.get(sInterval.substring(index + 1));
                if (rounding == -1) {
                    throw new FacetPhaseExecutionException(facetName, "failed to parse interval [" + sInterval + "], rounding type [" + (sInterval.substring(index + 1)) + "] not found");
                }
                dateTime.setRounding(field, rounding);
            } else {
                DateFieldParser fieldParser = dateFieldParsers.get(sInterval);
                if (fieldParser != null) {
                    DateTimeField field = fieldParser.parse(dateTime.getChronology());
                    dateTime.setRounding(field, MutableDateTime.ROUND_FLOOR);
                } else {
                    // time interval
                    try {
                        interval = TimeValue.parseTimeValue(sInterval, null).millis();
                    } catch (Exception e) {
                        throw new FacetPhaseExecutionException(facetName, "failed to parse interval [" + sInterval + "], tried both as built in intervals (year/month/...) and as a time format");
                    }
                }
            }
        }



        if (distinctFieldMapper.fieldDataType().getType().equals("string")) {
            PagedBytesIndexFieldData distinctFieldData = context.fieldData().getForField(distinctFieldMapper);
            PackedArrayIndexFieldData keyIndexFieldData = context.fieldData().getForField(keyMapper);
            return new StringDistinctDateHistogramFacetExecutor(keyIndexFieldData, distinctFieldData, dateTime, interval, comparatorType, context.cacheRecycler());
        } else if (distinctFieldMapper.fieldDataType().getType().equals("long"))  {
            IndexNumericFieldData distinctFieldData = context.fieldData().getForField(distinctFieldMapper);
            IndexNumericFieldData keyIndexFieldData = context.fieldData().getForField(keyMapper);
            return new LongDistinctDateHistogramFacetExecutor(keyIndexFieldData, distinctFieldData, dateTime, interval, comparatorType, context.cacheRecycler());
        } else {
            throw new FacetPhaseExecutionException(facetName, "distinct field [" + distinctField + "] is not of type string or long");
View Full Code Here
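
The rounding set up above is plain Joda-Time: once a DateTimeField and a rounding mode are installed on a MutableDateTime, every timestamp assigned to it snaps to its bucket. A hedged, self-contained illustration (org.joda.time):

        MutableDateTime bucketer = new MutableDateTime(DateTimeZone.UTC);
        bucketer.setRounding(bucketer.getChronology().dayOfMonth(), MutableDateTime.ROUND_FLOOR);
        bucketer.setMillis(1385074800000L);    // an arbitrary timestamp
        long bucketKey = bucketer.getMillis(); // floored to the start of that day in UTC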

      
        if (keyField == null) {
            throw new FacetPhaseExecutionException(facetName, "key field is required to be set for histogram facet, either using [field] or using [key_field]");
        }

        FieldMapper mapper = context.smartNameFieldMapper(keyField);
        if (mapper == null) {
            throw new FacetPhaseExecutionException(facetName, "(key) field [" + keyField + "] not found");
        }
        if (mapper.fieldDataType() != FieldDataType.DefaultTypes.LONG) {
            throw new FacetPhaseExecutionException(facetName, "(key) field [" + keyField + "] is not of type date");
        }

        if (interval == null) {
            throw new FacetPhaseExecutionException(facetName, "[interval] is required to be set for histogram facet");
View Full Code Here
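
Note that the identity comparison against FieldDataType.DefaultTypes.LONG above only holds while mappers share that constant; comparing by type name, as the other parsers in this list do, is a more defensive variant (hedged sketch):

        if (!"long".equals(mapper.fieldDataType().getType())) {
            throw new FacetPhaseExecutionException(facetName, "(key) field [" + keyField + "] is not of type date");
        }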

        Analyzer analyzer = new GermanAnalyzer(Version.LUCENE_4_9);
        SearchContext searchContext = mock(SearchContext.class);
        MapperService.SmartNameFieldMappers smartNameFieldMappers = mock(MapperService.SmartNameFieldMappers.class);
        when(searchContext.smartFieldMappers(anyString())).thenReturn(smartNameFieldMappers);
        when(smartNameFieldMappers.hasMapper()).thenReturn(true);
        FieldMapper fieldMapper = mock(FieldMapper.class, Answers.RETURNS_MOCKS.get());
        when(smartNameFieldMappers.mapper()).thenReturn(fieldMapper);
        when(fieldMapper.searchAnalyzer()).thenReturn(analyzer);

        MapperService mapperService = mock(MapperService.class);
        when(searchContext.mapperService()).thenReturn(mapperService);
        when(mapperService.searchAnalyzer()).thenReturn(analyzer);
View Full Code Here
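
With those stubs in place, any field lookup through the mocked context resolves to the GermanAnalyzer; a hedged usage sketch (the field name is arbitrary since the stub matches any string; sameInstance is Hamcrest):

        Analyzer resolved = searchContext.smartFieldMappers("body").mapper().searchAnalyzer();
        assertThat(resolved, sameInstance(analyzer));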

    public void prepare() throws Exception {
        ifd = new IndexFieldDataService(new Index("test"), new NoneCircuitBreakerService());
        ifd.setIndexService(new StubIndexService(null));

        MapperService mapperService = mock(MapperService.class);
        FieldMapper fieldMapper = mock(FieldMapper.class);
        when(fieldMapper.names()).thenReturn(fieldName());
        when(fieldMapper.fieldDataType()).thenReturn(fieldType());
        when(mapperService.smartNameFieldMapper(anyString(), Matchers.<String[]>any())).thenReturn(fieldMapper);


        IndexFieldData<?> fieldData = ifd.getForField(fieldMapper);
        writer = new IndexWriter(new RAMDirectory(),
View Full Code Here
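
Because the stubbed MapperService answers every name, any lookup returns the mocked FieldMapper; a hedged sanity check (field name arbitrary):

        FieldMapper resolved = mapperService.smartNameFieldMapper("any_field", null);
        assertThat(resolved, sameInstance(fieldMapper));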
