Java Code Examples for org.apache.kylin.cube.CubeInstance#setSegments()
The following examples show how to use org.apache.kylin.cube.CubeInstance#setSegments().
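Before the examples, here is a minimal sketch of the pattern most of the snippets below follow: take a writable copy of a CubeInstance, replace its segment list with setSegments(), and persist the change through CubeManager. It assumes a standard Kylin environment; the cube name "sample_cube" is hypothetical, so substitute a cube that exists in your metastore.

import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.cube.CubeInstance;
import org.apache.kylin.cube.CubeManager;
import org.apache.kylin.cube.CubeSegment;
import org.apache.kylin.cube.CubeUpdate;
import org.apache.kylin.metadata.model.Segments;

public class SetSegmentsSketch {

    public static void main(String[] args) throws Exception {
        // Assumes kylin.properties is reachable through the usual KylinConfig environment lookup.
        KylinConfig config = KylinConfig.getInstanceFromEnv();
        CubeManager cubeMgr = CubeManager.getInstance(config);

        // "sample_cube" is a hypothetical cube name; use one that exists in your metastore.
        CubeInstance cube = cubeMgr.getCube("sample_cube");

        // Work on a writable copy rather than mutating the cached instance directly.
        CubeInstance copy = cube.latestCopyForWrite();

        // setSegments() replaces the entire segment list; here it is cleared,
        // mirroring the "clear segments for old cube" step in the examples below.
        copy.setSegments(new Segments<CubeSegment>());

        // Persist the modified copy through CubeManager.
        cubeMgr.updateCube(new CubeUpdate(copy));
    }
}

Example 5 below shows the same CubeUpdate pattern inside a test, while the ExtendCubeToHybridCLI examples write the modified CubeInstance to the ResourceStore directly.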
Example 1
Source File: CubeInstanceCreator.java From kylin-on-parquet-v2 with Apache License 2.0
public static CubeInstance generateKylinCubeInstance(String owner, String tableName) {
    CubeInstance cubeInstance = new CubeInstance();
    cubeInstance.setName(tableName.replace('.', '_'));
    cubeInstance.setSegments(new Segments<CubeSegment>());
    cubeInstance.setDescName(tableName.replace('.', '_'));
    cubeInstance.setStatus(RealizationStatusEnum.DISABLED);
    cubeInstance.setOwner(owner);
    cubeInstance.setCreateTimeUTC(0L);
    cubeInstance.updateRandomUuid();
    return cubeInstance;
}
Example 2
Source File: CubeInstanceCreator.java From kylin with Apache License 2.0
public static CubeInstance generateKylinCubeInstance(String owner, String tableName) {
    CubeInstance cubeInstance = new CubeInstance();
    cubeInstance.setName(tableName.replace('.', '_'));
    cubeInstance.setSegments(new Segments<CubeSegment>());
    cubeInstance.setDescName(tableName.replace('.', '_'));
    cubeInstance.setStatus(RealizationStatusEnum.DISABLED);
    cubeInstance.setOwner(owner);
    cubeInstance.setCreateTimeUTC(0L);
    cubeInstance.updateRandomUuid();
    return cubeInstance;
}
Example 3
Source File: ExtendCubeToHybridCLI.java From kylin-on-parquet-v2 with Apache License 2.0
public void createFromCube(String projectName, String cubeName, String partitionDateStr) throws Exception {
    logger.info("Create hybrid for cube[" + cubeName + "], project[" + projectName + "], partition_date[" + partitionDateStr + "].");

    CubeInstance cubeInstance = cubeManager.getCube(cubeName);
    if (!validateCubeInstance(cubeInstance)) {
        return;
    }

    CubeDesc cubeDesc = cubeDescManager.getCubeDesc(cubeInstance.getDescName());
    DataModelDesc dataModelDesc = metadataManager.getDataModelDesc(cubeDesc.getModelName());
    if (StringUtils.isEmpty(dataModelDesc.getPartitionDesc().getPartitionDateColumn())) {
        logger.error("No incremental cube, no need to extend.");
        return;
    }

    String owner = cubeInstance.getOwner();
    long partitionDate = partitionDateStr != null ? DateFormat.stringToMillis(partitionDateStr) : 0;

    // get new name for old cube and cube_desc
    String newCubeDescName = renameCube(cubeDesc.getName());
    String newCubeInstanceName = renameCube(cubeInstance.getName());
    while (cubeDescManager.getCubeDesc(newCubeDescName) != null)
        newCubeDescName = renameCube(newCubeDescName);
    while (cubeManager.getCube(newCubeInstanceName) != null)
        newCubeInstanceName = renameCube(newCubeInstanceName);

    // create new cube_instance for old segments
    CubeInstance newCubeInstance = CubeInstance.getCopyOf(cubeInstance);
    newCubeInstance.setName(newCubeInstanceName);
    newCubeInstance.setDescName(newCubeDescName);
    newCubeInstance.updateRandomUuid();
    Iterator<CubeSegment> segmentIterator = newCubeInstance.getSegments().iterator();
    CubeSegment currentSeg = null;
    while (segmentIterator.hasNext()) {
        currentSeg = segmentIterator.next();
        if (partitionDateStr != null && (currentSeg.getTSRange().start.v >= partitionDate || currentSeg.getTSRange().end.v > partitionDate)) {
            segmentIterator.remove();
            logger.info("CubeSegment[" + currentSeg + "] was removed.");
        }
    }
    if (currentSeg != null && partitionDateStr != null && partitionDate != currentSeg.getTSRange().end.v) {
        logger.error("PartitionDate must be end date of one segment.");
        return;
    }
    if (currentSeg != null && partitionDateStr == null)
        partitionDate = currentSeg.getTSRange().end.v;

    cubeManager.createCube(newCubeInstance, projectName, owner);
    logger.info("CubeInstance was saved at: " + newCubeInstance.getResourcePath());

    // create new cube for old segments
    CubeDesc newCubeDesc = CubeDesc.getCopyOf(cubeDesc);
    newCubeDesc.setName(newCubeDescName);
    newCubeDesc.updateRandomUuid();
    newCubeDesc.init(kylinConfig);
    newCubeDesc.setPartitionDateEnd(partitionDate);
    newCubeDesc.calculateSignature();
    cubeDescManager.createCubeDesc(newCubeDesc);
    logger.info("CubeDesc was saved at: " + newCubeDesc.getResourcePath());

    // update old cube_desc to new-version metadata
    cubeDesc.setPartitionDateStart(partitionDate);
    cubeDesc.setEngineType(IEngineAware.ID_MR_V2);
    cubeDesc.setStorageType(IStorageAware.ID_SHARDED_HBASE);
    cubeDesc.calculateSignature();
    cubeDescManager.updateCubeDesc(cubeDesc);
    logger.info("CubeDesc was saved at: " + cubeDesc.getResourcePath());

    // clear segments for old cube
    cubeInstance.setSegments(new Segments());
    cubeInstance.setStatus(RealizationStatusEnum.DISABLED);
    store.checkAndPutResource(cubeInstance.getResourcePath(), cubeInstance, CubeManager.CUBE_SERIALIZER);
    logger.info("CubeInstance was saved at: " + cubeInstance.getResourcePath());

    // create hybrid model for these two cubes
    List<RealizationEntry> realizationEntries = Lists.newArrayListWithCapacity(2);
    realizationEntries.add(RealizationEntry.create(RealizationType.CUBE, cubeInstance.getName()));
    realizationEntries.add(RealizationEntry.create(RealizationType.CUBE, newCubeInstance.getName()));
    HybridInstance hybridInstance = HybridInstance.create(kylinConfig, renameHybrid(cubeInstance.getName()), realizationEntries);
    store.checkAndPutResource(hybridInstance.getResourcePath(), hybridInstance, HybridManager.HYBRID_SERIALIZER);
    ProjectManager.getInstance(kylinConfig).moveRealizationToProject(RealizationType.HYBRID, hybridInstance.getName(), projectName, owner);
    logger.info("HybridInstance was saved at: " + hybridInstance.getResourcePath());

    // copy Acl from old cube to new cube
    copyAcl(cubeInstance.getId(), newCubeInstance.getId(), projectName);
    logger.info("Acl copied from [" + cubeName + "] to [" + newCubeInstanceName + "].");
}
Example 4
Source File: ExtendCubeToHybridCLI.java From kylin-on-parquet-v2 with Apache License 2.0
public void createFromCube(String projectName, String cubeName, String partitionDateStr) throws Exception {
    logger.info("Create hybrid for cube[" + cubeName + "], project[" + projectName + "], partition_date[" + partitionDateStr + "].");

    CubeInstance cubeInstance = cubeManager.getCube(cubeName);
    if (!validateCubeInstance(cubeInstance)) {
        return;
    }

    CubeDesc cubeDesc = cubeDescManager.getCubeDesc(cubeInstance.getDescName());
    DataModelDesc dataModelDesc = metadataManager.getDataModelDesc(cubeDesc.getModelName());
    if (StringUtils.isEmpty(dataModelDesc.getPartitionDesc().getPartitionDateColumn())) {
        logger.error("No incremental cube, no need to extend.");
        return;
    }

    String owner = cubeInstance.getOwner();
    long partitionDate = partitionDateStr != null ? DateFormat.stringToMillis(partitionDateStr) : 0;

    // get new name for old cube and cube_desc
    String newCubeDescName = renameCube(cubeDesc.getName());
    String newCubeInstanceName = renameCube(cubeInstance.getName());
    while (cubeDescManager.getCubeDesc(newCubeDescName) != null)
        newCubeDescName = renameCube(newCubeDescName);
    while (cubeManager.getCube(newCubeInstanceName) != null)
        newCubeInstanceName = renameCube(newCubeInstanceName);

    // create new cube_instance for old segments
    CubeInstance newCubeInstance = CubeInstance.getCopyOf(cubeInstance);
    newCubeInstance.setName(newCubeInstanceName);
    newCubeInstance.setDescName(newCubeDescName);
    newCubeInstance.updateRandomUuid();
    Iterator<CubeSegment> segmentIterator = newCubeInstance.getSegments().iterator();
    CubeSegment currentSeg = null;
    while (segmentIterator.hasNext()) {
        currentSeg = segmentIterator.next();
        if (partitionDateStr != null && (currentSeg.getTSRange().start.v >= partitionDate || currentSeg.getTSRange().end.v > partitionDate)) {
            segmentIterator.remove();
            logger.info("CubeSegment[" + currentSeg + "] was removed.");
        }
    }
    if (currentSeg != null && partitionDateStr != null && partitionDate != currentSeg.getTSRange().end.v) {
        logger.error("PartitionDate must be end date of one segment.");
        return;
    }
    if (currentSeg != null && partitionDateStr == null)
        partitionDate = currentSeg.getTSRange().end.v;

    cubeManager.createCube(newCubeInstance, projectName, owner);
    logger.info("CubeInstance was saved at: " + newCubeInstance.getResourcePath());

    // create new cube for old segments
    CubeDesc newCubeDesc = CubeDesc.getCopyOf(cubeDesc);
    newCubeDesc.setName(newCubeDescName);
    newCubeDesc.updateRandomUuid();
    newCubeDesc.init(kylinConfig);
    newCubeDesc.setPartitionDateEnd(partitionDate);
    newCubeDesc.calculateSignature();
    cubeDescManager.createCubeDesc(newCubeDesc);
    logger.info("CubeDesc was saved at: " + newCubeDesc.getResourcePath());

    // update old cube_desc to new-version metadata
    cubeDesc.setPartitionDateStart(partitionDate);
    cubeDesc.setEngineType(IEngineAware.ID_MR_V2);
    cubeDesc.setStorageType(IStorageAware.ID_SHARDED_HBASE);
    cubeDesc.calculateSignature();
    cubeDescManager.updateCubeDesc(cubeDesc);
    logger.info("CubeDesc was saved at: " + cubeDesc.getResourcePath());

    // clear segments for old cube
    cubeInstance.setSegments(new Segments<CubeSegment>());
    cubeInstance.setStatus(RealizationStatusEnum.DISABLED);
    store.checkAndPutResource(cubeInstance.getResourcePath(), cubeInstance, CubeManager.CUBE_SERIALIZER);
    logger.info("CubeInstance was saved at: " + cubeInstance.getResourcePath());

    // create hybrid model for these two cubes
    List<RealizationEntry> realizationEntries = Lists.newArrayListWithCapacity(2);
    realizationEntries.add(RealizationEntry.create(RealizationType.CUBE, cubeInstance.getName()));
    realizationEntries.add(RealizationEntry.create(RealizationType.CUBE, newCubeInstance.getName()));
    HybridInstance hybridInstance = HybridInstance.create(kylinConfig, renameHybrid(cubeInstance.getName()), realizationEntries);
    store.checkAndPutResource(hybridInstance.getResourcePath(), hybridInstance, HybridManager.HYBRID_SERIALIZER);
    ProjectManager.getInstance(kylinConfig).moveRealizationToProject(RealizationType.HYBRID, hybridInstance.getName(), projectName, owner);
    logger.info("HybridInstance was saved at: " + hybridInstance.getResourcePath());

    // copy Acl from old cube to new cube
    copyAcl(cubeInstance.getId(), newCubeInstance.getId(), projectName);
    logger.info("Acl copied from [" + cubeName + "] to [" + newCubeInstanceName + "].");
}
Example 5
Source File: JobStepFactoryTest.java From kylin-on-parquet-v2 with Apache License 2.0
@Test
public void testAddStepInMerging() throws Exception {
    CubeManager cubeMgr = CubeManager.getInstance(config);
    CubeInstance cube = cubeMgr.getCube(CUBE_NAME);
    cleanupSegments(CUBE_NAME);

    /**
     * Round 1. Add 2 segments
     */
    CubeSegment segment1 = cubeMgr.appendSegment(cube, new SegmentRange.TSRange(dateToLong("2010-01-01"), dateToLong("2013-01-01")));
    CubeSegment segment2 = cubeMgr.appendSegment(cube, new SegmentRange.TSRange(dateToLong("2013-01-01"), dateToLong("2015-01-01")));
    segment1.setStatus(SegmentStatusEnum.READY);
    segment2.setStatus(SegmentStatusEnum.READY);

    CubeInstance reloadCube = cube.latestCopyForWrite();
    Segments segments = new Segments();
    segments.add(segment1);
    segments.add(segment2);
    reloadCube.setSegments(segments);
    CubeUpdate update = new CubeUpdate(reloadCube);
    cubeMgr.updateCube(update);

    /**
     * Round 2. Merge two segments
     */
    reloadCube = cubeMgr.reloadCube(CUBE_NAME);
    CubeSegment mergedSegment = cubeMgr.mergeSegments(reloadCube, new SegmentRange.TSRange(dateToLong("2010-01-01"), dateToLong("2015-01-01")), null, true);
    NSparkMergingJob job = NSparkMergingJob.merge(mergedSegment, "ADMIN");
    Assert.assertEquals(CUBE_NAME, job.getParam(MetadataConstants.P_CUBE_NAME));

    NSparkExecutable resourceDetectStep = job.getResourceDetectStep();
    Assert.assertEquals(ResourceDetectBeforeMergingJob.class.getName(), resourceDetectStep.getSparkSubmitClassName());
    Assert.assertEquals(ExecutableConstants.STEP_NAME_DETECT_RESOURCE, resourceDetectStep.getName());
    job.getParams().forEach((key, value) -> Assert.assertEquals(value, resourceDetectStep.getParam(key)));
    Assert.assertEquals(config.getJobTmpMetaStoreUrl(getProject(), resourceDetectStep.getId()).toString(), resourceDetectStep.getDistMetaUrl());

    NSparkExecutable mergeStep = job.getSparkMergingStep();
    Assert.assertEquals(config.getSparkMergeClassName(), mergeStep.getSparkSubmitClassName());
    Assert.assertEquals(ExecutableConstants.STEP_NAME_MERGER_SPARK_SEGMENT, mergeStep.getName());
    job.getParams().forEach((key, value) -> Assert.assertEquals(value, mergeStep.getParam(key)));
    Assert.assertEquals(config.getJobTmpMetaStoreUrl(getProject(), mergeStep.getId()).toString(), mergeStep.getDistMetaUrl());

    CubeInstance cubeInstance = cubeMgr.reloadCube(CUBE_NAME);
    NSparkUpdateMetaAndCleanupAfterMergeStep cleanStep = job.getCleanUpAfterMergeStep();
    job.getParams().forEach((key, value) -> {
        if (key.equalsIgnoreCase(MetadataConstants.P_SEGMENT_IDS)) {
            final List<String> needDeleteSegmentNames = cubeInstance.getMergingSegments(mergedSegment).stream()
                    .map(CubeSegment::getName).collect(Collectors.toList());
            Assert.assertEquals(needDeleteSegmentNames,
                    Arrays.asList(cleanStep.getParam(MetadataConstants.P_SEGMENT_NAMES).split(",")));
        } else {
            Assert.assertEquals(value, mergeStep.getParam(key));
        }
    });
    Assert.assertEquals(config.getJobTmpMetaStoreUrl(getProject(), cleanStep.getId()).toString(), cleanStep.getDistMetaUrl());
}
Example 6
Source File: ExtendCubeToHybridCLI.java From kylin with Apache License 2.0
public void createFromCube(String projectName, String cubeName, String partitionDateStr) throws Exception {
    logger.info("Create hybrid for cube[" + cubeName + "], project[" + projectName + "], partition_date[" + partitionDateStr + "].");

    CubeInstance cubeInstance = cubeManager.getCube(cubeName);
    if (!validateCubeInstance(cubeInstance)) {
        return;
    }

    CubeDesc cubeDesc = cubeDescManager.getCubeDesc(cubeInstance.getDescName());
    DataModelDesc dataModelDesc = metadataManager.getDataModelDesc(cubeDesc.getModelName());
    if (StringUtils.isEmpty(dataModelDesc.getPartitionDesc().getPartitionDateColumn())) {
        logger.error("No incremental cube, no need to extend.");
        return;
    }

    String owner = cubeInstance.getOwner();
    long partitionDate = partitionDateStr != null ? DateFormat.stringToMillis(partitionDateStr) : 0;

    // get new name for old cube and cube_desc
    String newCubeDescName = renameCube(cubeDesc.getName());
    String newCubeInstanceName = renameCube(cubeInstance.getName());
    while (cubeDescManager.getCubeDesc(newCubeDescName) != null)
        newCubeDescName = renameCube(newCubeDescName);
    while (cubeManager.getCube(newCubeInstanceName) != null)
        newCubeInstanceName = renameCube(newCubeInstanceName);

    // create new cube_instance for old segments
    CubeInstance newCubeInstance = CubeInstance.getCopyOf(cubeInstance);
    newCubeInstance.setName(newCubeInstanceName);
    newCubeInstance.setDescName(newCubeDescName);
    newCubeInstance.updateRandomUuid();
    Iterator<CubeSegment> segmentIterator = newCubeInstance.getSegments().iterator();
    CubeSegment currentSeg = null;
    while (segmentIterator.hasNext()) {
        currentSeg = segmentIterator.next();
        if (partitionDateStr != null && (currentSeg.getTSRange().start.v >= partitionDate || currentSeg.getTSRange().end.v > partitionDate)) {
            segmentIterator.remove();
            logger.info("CubeSegment[" + currentSeg + "] was removed.");
        }
    }
    if (currentSeg != null && partitionDateStr != null && partitionDate != currentSeg.getTSRange().end.v) {
        logger.error("PartitionDate must be end date of one segment.");
        return;
    }
    if (currentSeg != null && partitionDateStr == null)
        partitionDate = currentSeg.getTSRange().end.v;

    cubeManager.createCube(newCubeInstance, projectName, owner);
    logger.info("CubeInstance was saved at: " + newCubeInstance.getResourcePath());

    // create new cube for old segments
    CubeDesc newCubeDesc = CubeDesc.getCopyOf(cubeDesc);
    newCubeDesc.setName(newCubeDescName);
    newCubeDesc.updateRandomUuid();
    newCubeDesc.init(kylinConfig);
    newCubeDesc.setPartitionDateEnd(partitionDate);
    newCubeDesc.calculateSignature();
    cubeDescManager.createCubeDesc(newCubeDesc);
    logger.info("CubeDesc was saved at: " + newCubeDesc.getResourcePath());

    // update old cube_desc to new-version metadata
    cubeDesc.setPartitionDateStart(partitionDate);
    cubeDesc.setEngineType(IEngineAware.ID_MR_V2);
    cubeDesc.setStorageType(IStorageAware.ID_SHARDED_HBASE);
    cubeDesc.calculateSignature();
    cubeDescManager.updateCubeDesc(cubeDesc);
    logger.info("CubeDesc was saved at: " + cubeDesc.getResourcePath());

    // clear segments for old cube
    cubeInstance.setSegments(new Segments());
    cubeInstance.setStatus(RealizationStatusEnum.DISABLED);
    store.checkAndPutResource(cubeInstance.getResourcePath(), cubeInstance, CubeManager.CUBE_SERIALIZER);
    logger.info("CubeInstance was saved at: " + cubeInstance.getResourcePath());

    // create hybrid model for these two cubes
    List<RealizationEntry> realizationEntries = Lists.newArrayListWithCapacity(2);
    realizationEntries.add(RealizationEntry.create(RealizationType.CUBE, cubeInstance.getName()));
    realizationEntries.add(RealizationEntry.create(RealizationType.CUBE, newCubeInstance.getName()));
    HybridInstance hybridInstance = HybridInstance.create(kylinConfig, renameHybrid(cubeInstance.getName()), realizationEntries);
    store.checkAndPutResource(hybridInstance.getResourcePath(), hybridInstance, HybridManager.HYBRID_SERIALIZER);
    ProjectManager.getInstance(kylinConfig).moveRealizationToProject(RealizationType.HYBRID, hybridInstance.getName(), projectName, owner);
    logger.info("HybridInstance was saved at: " + hybridInstance.getResourcePath());

    // copy Acl from old cube to new cube
    copyAcl(cubeInstance.getId(), newCubeInstance.getId(), projectName);
    logger.info("Acl copied from [" + cubeName + "] to [" + newCubeInstanceName + "].");
}
Example 7
Source File: ExtendCubeToHybridCLI.java From kylin with Apache License 2.0
public void createFromCube(String projectName, String cubeName, String partitionDateStr) throws Exception {
    logger.info("Create hybrid for cube[" + cubeName + "], project[" + projectName + "], partition_date[" + partitionDateStr + "].");

    CubeInstance cubeInstance = cubeManager.getCube(cubeName);
    if (!validateCubeInstance(cubeInstance)) {
        return;
    }

    CubeDesc cubeDesc = cubeDescManager.getCubeDesc(cubeInstance.getDescName());
    DataModelDesc dataModelDesc = metadataManager.getDataModelDesc(cubeDesc.getModelName());
    if (StringUtils.isEmpty(dataModelDesc.getPartitionDesc().getPartitionDateColumn())) {
        logger.error("No incremental cube, no need to extend.");
        return;
    }

    String owner = cubeInstance.getOwner();
    long partitionDate = partitionDateStr != null ? DateFormat.stringToMillis(partitionDateStr) : 0;

    // get new name for old cube and cube_desc
    String newCubeDescName = renameCube(cubeDesc.getName());
    String newCubeInstanceName = renameCube(cubeInstance.getName());
    while (cubeDescManager.getCubeDesc(newCubeDescName) != null)
        newCubeDescName = renameCube(newCubeDescName);
    while (cubeManager.getCube(newCubeInstanceName) != null)
        newCubeInstanceName = renameCube(newCubeInstanceName);

    // create new cube_instance for old segments
    CubeInstance newCubeInstance = CubeInstance.getCopyOf(cubeInstance);
    newCubeInstance.setName(newCubeInstanceName);
    newCubeInstance.setDescName(newCubeDescName);
    newCubeInstance.updateRandomUuid();
    Iterator<CubeSegment> segmentIterator = newCubeInstance.getSegments().iterator();
    CubeSegment currentSeg = null;
    while (segmentIterator.hasNext()) {
        currentSeg = segmentIterator.next();
        if (partitionDateStr != null && (currentSeg.getTSRange().start.v >= partitionDate || currentSeg.getTSRange().end.v > partitionDate)) {
            segmentIterator.remove();
            logger.info("CubeSegment[" + currentSeg + "] was removed.");
        }
    }
    if (currentSeg != null && partitionDateStr != null && partitionDate != currentSeg.getTSRange().end.v) {
        logger.error("PartitionDate must be end date of one segment.");
        return;
    }
    if (currentSeg != null && partitionDateStr == null)
        partitionDate = currentSeg.getTSRange().end.v;

    cubeManager.createCube(newCubeInstance, projectName, owner);
    logger.info("CubeInstance was saved at: " + newCubeInstance.getResourcePath());

    // create new cube for old segments
    CubeDesc newCubeDesc = CubeDesc.getCopyOf(cubeDesc);
    newCubeDesc.setName(newCubeDescName);
    newCubeDesc.updateRandomUuid();
    newCubeDesc.init(kylinConfig);
    newCubeDesc.setPartitionDateEnd(partitionDate);
    newCubeDesc.calculateSignature();
    cubeDescManager.createCubeDesc(newCubeDesc);
    logger.info("CubeDesc was saved at: " + newCubeDesc.getResourcePath());

    // update old cube_desc to new-version metadata
    cubeDesc.setPartitionDateStart(partitionDate);
    cubeDesc.setEngineType(IEngineAware.ID_MR_V2);
    cubeDesc.setStorageType(IStorageAware.ID_SHARDED_HBASE);
    cubeDesc.calculateSignature();
    cubeDescManager.updateCubeDesc(cubeDesc);
    logger.info("CubeDesc was saved at: " + cubeDesc.getResourcePath());

    // clear segments for old cube
    cubeInstance.setSegments(new Segments<CubeSegment>());
    cubeInstance.setStatus(RealizationStatusEnum.DISABLED);
    store.checkAndPutResource(cubeInstance.getResourcePath(), cubeInstance, CubeManager.CUBE_SERIALIZER);
    logger.info("CubeInstance was saved at: " + cubeInstance.getResourcePath());

    // create hybrid model for these two cubes
    List<RealizationEntry> realizationEntries = Lists.newArrayListWithCapacity(2);
    realizationEntries.add(RealizationEntry.create(RealizationType.CUBE, cubeInstance.getName()));
    realizationEntries.add(RealizationEntry.create(RealizationType.CUBE, newCubeInstance.getName()));
    HybridInstance hybridInstance = HybridInstance.create(kylinConfig, renameHybrid(cubeInstance.getName()), realizationEntries);
    store.checkAndPutResource(hybridInstance.getResourcePath(), hybridInstance, HybridManager.HYBRID_SERIALIZER);
    ProjectManager.getInstance(kylinConfig).moveRealizationToProject(RealizationType.HYBRID, hybridInstance.getName(), projectName, owner);
    logger.info("HybridInstance was saved at: " + hybridInstance.getResourcePath());

    // copy Acl from old cube to new cube
    copyAcl(cubeInstance.getId(), newCubeInstance.getId(), projectName);
    logger.info("Acl copied from [" + cubeName + "] to [" + newCubeInstanceName + "].");
}