@@ -33,7 +33,16 @@ import zio.elasticsearch.query.sort.SortOrder._
 import zio.elasticsearch.query.sort.SourceType.NumberType
 import zio.elasticsearch.query.{Distance, FunctionScoreBoostMode, FunctionScoreFunction, InnerHits}
 import zio.elasticsearch.request.{CreationOutcome, DeletionOutcome}
-import zio.elasticsearch.result.{FilterAggregationResult, Item, MaxAggregationResult, UpdateByQueryResult}
+import zio.elasticsearch.result.{
+  FilterAggregationResult,
+  Item,
+  MaxAggregationResult,
+  SamplerAggregationResult,
+  SumAggregationResult,
+  TermsAggregationBucketResult,
+  TermsAggregationResult,
+  UpdateByQueryResult
+}
 import zio.elasticsearch.script.{Painless, Script}
 import zio.json.ast.Json.{Arr, Str}
 import zio.schema.codec.JsonCodec
@@ -408,6 +417,55 @@ object HttpExecutorSpec extends IntegrationSpec {
             Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
             Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
           ),
+ test(" aggregate using sampler aggregation with sum and terms sub aggregations" ) {
421
+            checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
+              (docIdA, docA, docIdB, docB, docIdC, docC) =>
+                for {
+                  _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+                  documentA = docA.copy(stringField = "abc", intField = 10)
+                  documentB = docB.copy(stringField = "def", intField = 20)
+                  documentC = docC.copy(stringField = "ghi", intField = 15)
+                  _ <- Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, docIdA, documentA))
+                  _ <- Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, docIdB, documentB))
+                  _ <- Executor.execute(
+                         ElasticRequest.upsert[TestDocument](firstSearchIndex, docIdC, documentC).refreshTrue
+                       )
+                  aggregation = samplerAggregation(
+                                  "sampler_agg",
+                                  sumAggregation("total_sum_field", TestDocument.intField)
+                                ).withSubAgg(termsAggregation("string_categories", TestDocument.stringField.keyword))
+                                  .maxDocumentsPerShard(100)
+                  aggsRes <-
+                    Executor
+                      .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation))
+                      .aggregations
+                      .map(_.head)
+
+                  expectedResult =
+                    (
+                      "sampler_agg",
+                      SamplerAggregationResult(
+                        docCount = 3,
+                        subAggregations = Map(
+                          "total_sum_field" -> SumAggregationResult(value = 45.0),
+                          "string_categories" -> TermsAggregationResult(
+                            docErrorCount = 0,
+                            sumOtherDocCount = 0,
+                            buckets = Chunk(
+                              TermsAggregationBucketResult(key = "abc", docCount = 1, subAggregations = Map.empty),
+                              TermsAggregationBucketResult(key = "def", docCount = 1, subAggregations = Map.empty),
+                              TermsAggregationBucketResult(key = "ghi", docCount = 1, subAggregations = Map.empty)
+                            )
+                          )
+                        )
+                      )
+                    )
+                } yield assert(aggsRes)(equalTo(expectedResult))
+            }
+          } @@ around(
+            Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
+            Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+          ),
           test("aggregate using stats aggregation") {
             checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
               (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) =>
@@ -795,6 +853,57 @@ object HttpExecutorSpec extends IntegrationSpec {
             Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
             Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
           ),
+          test("search using sampler aggregation") {
+            val expectedAggResult = SamplerAggregationResult(
+              docCount = 2,
+              subAggregations = Map(
+                "sampled_strings" -> TermsAggregationResult(
+                  docErrorCount = 0,
+                  sumOtherDocCount = 0,
+                  buckets = Chunk(
+                    TermsAggregationBucketResult(key = "zio", docCount = 1, subAggregations = Map.empty),
+                    TermsAggregationBucketResult(key = "zio-elasticsearch", docCount = 1, subAggregations = Map.empty)
+                  )
+                )
+              )
+            )
+            checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
+              (docIdA, docA, docIdB, docB, docIdC, docC) =>
+                val documentA = docA.copy(stringField = "zio")
+                val documentB = docB.copy(stringField = "elasticsearch")
+                val documentC = docC.copy(stringField = "zio-elasticsearch")
+                val expectedSearchDocs = Chunk(documentA, documentC)
+                for {
+                  _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+                  _ <- Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, docIdA, documentA))
+                  _ <- Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, docIdB, documentB))
+                  _ <- Executor.execute(
+                         ElasticRequest.upsert[TestDocument](firstSearchIndex, docIdC, documentC).refreshTrue
+                       )
+                  searchQuery = matches(TestDocument.stringField, "zio")
+                  aggregation = samplerAggregation(
+                                  "sampler_agg",
+                                  termsAggregation("sampled_strings", TestDocument.stringField.keyword)
+                                )
+                                  .maxDocumentsPerShard(2)
+                  res <- Executor.execute(
+                           ElasticRequest
+                             .search(
+                               selectors = firstSearchIndex,
+                               query = searchQuery,
+                               aggregation = aggregation
+                             )
+                         )
+                  docs       <- res.documentAs[TestDocument]
+                  samplerAgg <- res.aggregation("sampler_agg")
+                } yield assert(docs.length)(equalTo(2)) &&
+                  assert(docs.toSet)(equalTo(expectedSearchDocs.toSet)) &&
+                  assert(samplerAgg)(isSome(equalTo(expectedAggResult)))
+            }
+          } @@ around(
+            Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
+            Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+          ),
           test(
             "search using match all query with terms aggregations, nested max aggregation and nested bucketSelector aggregation"
           ) {