org.apache.kylin.cube.CubeDescManager Java Examples
The following examples show how to use org.apache.kylin.cube.CubeDescManager.
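All of the examples share the same read-side pattern: obtain the manager for a KylinConfig with CubeDescManager.getInstance(config), then fetch a descriptor by name with getCubeDesc(name) or enumerate them with listAllDesc(). The sketch below shows only that pattern, assuming the usual Kylin package layout; the cube name "my_cube" and the main method are placeholders for illustration and do not come from the examples above.

import java.util.List;

import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.cube.CubeDescManager;
import org.apache.kylin.cube.model.CubeDesc;

public class CubeDescManagerSketch {
    public static void main(String[] args) {
        // One manager is cached per KylinConfig; getInstance() reuses it.
        KylinConfig config = KylinConfig.getInstanceFromEnv();
        CubeDescManager cubeDescManager = CubeDescManager.getInstance(config);

        // Look up a single cube descriptor by name ("my_cube" is a placeholder).
        CubeDesc cubeDesc = cubeDescManager.getCubeDesc("my_cube");
        if (cubeDesc != null) {
            System.out.println("Measures: " + cubeDesc.getMeasures().size());
        }

        // Or enumerate every descriptor known to this metadata store.
        List<CubeDesc> allDescs = cubeDescManager.listAllDesc();
        System.out.println("Total cube descriptors: " + allDescs.size());
    }
}

Examples #2 and #26 also show the write path, where a CubeDesc is persisted to the resource store through CubeDescManager.CUBE_DESC_SERIALIZER.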
Example #1
Source File: JobControllerTest.java From kylin with Apache License 2.0

@Before
public void setup() throws Exception {
    super.setup();
    jobSchedulerController = new JobController();
    jobSchedulerController.setJobService(jobService);
    cubeController = new CubeController();
    cubeController.setJobService(jobService);
    cubeController.setCubeService(cubeService);
    KylinConfig testConfig = getTestConfig();
    cubeManager = CubeManager.getInstance(testConfig);
    cubeDescManager = CubeDescManager.getInstance(testConfig);
    executableDAO = ExecutableDao.getInstance(testConfig);
}
Example #2
Source File: CubeMetadataUpgrade.java From Kylin with Apache License 2.0

private void upgradeCubeDesc() {
    logger.info("Reloading Cube Metadata from folder "
            + store.getReadableResourcePath(ResourceStore.CUBE_DESC_RESOURCE_ROOT));

    List<String> paths = listResourceStore(ResourceStore.CUBE_DESC_RESOURCE_ROOT);
    for (String path : paths) {
        try {
            CubeDescUpgrader upgrade = new CubeDescUpgrader(path);
            CubeDesc ndesc = upgrade.upgrade();
            ndesc.setSignature(ndesc.calculateSignature());
            getStore().putResource(ndesc.getModel().getResourcePath(), ndesc.getModel(),
                    MetadataManager.MODELDESC_SERIALIZER);
            getStore().putResource(ndesc.getResourcePath(), ndesc, CubeDescManager.CUBE_DESC_SERIALIZER);
            updatedResources.add(ndesc.getResourcePath());
        } catch (IOException e) {
            e.printStackTrace();
            errorMsgs.add("Upgrade CubeDesc at '" + path + "' failed: " + e.getLocalizedMessage());
        }
    }
}
Example #3
Source File: BuildCubeWithEngine.java From kylin with Apache License 2.0

public void before() throws Exception {
    deployEnv();

    final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
    jobService = ExecutableManager.getInstance(kylinConfig);
    scheduler = DefaultScheduler.createInstance();
    scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
    if (!scheduler.hasStarted()) {
        throw new RuntimeException("scheduler has not been started");
    }
    cubeManager = CubeManager.getInstance(kylinConfig);
    for (String jobId : jobService.getAllJobIds()) {
        AbstractExecutable executable = jobService.getJob(jobId);
        if (executable instanceof CubingJob || executable instanceof CheckpointExecutable) {
            jobService.deleteJob(jobId);
        }
    }

    cubeDescManager = CubeDescManager.getInstance(kylinConfig);

    // update engine type
    updateCubeEngineType(Lists.newArrayList("ci_inner_join_cube", "ci_left_join_cube"));
}
Example #4
Source File: JobControllerTest.java From kylin-on-parquet-v2 with Apache License 2.0

@Before
public void setup() throws Exception {
    super.setup();
    jobSchedulerController = new JobController();
    jobSchedulerController.setJobService(jobService);
    cubeController = new CubeController();
    cubeController.setJobService(jobService);
    cubeController.setCubeService(cubeService);
    KylinConfig testConfig = getTestConfig();
    cubeManager = CubeManager.getInstance(testConfig);
    cubeDescManager = CubeDescManager.getInstance(testConfig);
    executableDAO = ExecutableDao.getInstance(testConfig);
}
Example #5
Source File: JdbcHiveMRInputTest.java From kylin with Apache License 2.0

@Test
public void testGenSqoopCmd_Partition() throws IOException {
    ISource source = SourceManager.getSource(new JdbcSourceAware());
    IMRInput input = source.adaptToBuildEngine(IMRInput.class);
    Assert.assertNotNull(input);

    CubeManager cubeManager = CubeManager.getInstance(getTestConfig());
    CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("ci_inner_join_cube");
    CubeSegment seg = cubeManager.appendSegment(cubeManager.getCube(cubeDesc.getName()),
            new SegmentRange.TSRange(System.currentTimeMillis() - 100L, System.currentTimeMillis() + 100L));
    CubeJoinedFlatTableDesc flatDesc = new CubeJoinedFlatTableDesc(seg);
    JdbcHiveMRInput.JdbcMRBatchCubingInputSide inputSide = (JdbcHiveMRInput.JdbcMRBatchCubingInputSide) input
            .getBatchCubingInputSide(flatDesc);

    AbstractExecutable executable = new MockInputSide(flatDesc, inputSide).createSqoopToFlatHiveStep("/tmp",
            cubeDesc.getName());
    Assert.assertNotNull(executable);
    String cmd = executable.getParam("cmd");
    Assert.assertTrue(cmd.contains("org.h2.Driver"));
    Assert.assertTrue(cmd.contains(
            "--boundary-query \"SELECT MIN(\\\"TEST_KYLIN_FACT\\\".\\\"LEAF_CATEG_ID\\\"), MAX(\\\"TEST_KYLIN_FACT\\\".\\\"LEAF_CATEG_ID\\\")"
                    + System.lineSeparator()
                    + "FROM \\\"DEFAULT\\\".\\\"TEST_KYLIN_FACT\\\" AS \\\"TEST_KYLIN_FACT\\\""));

    source.close();
}
Example #6
Source File: JdbcHiveMRInputTest.java From kylin-on-parquet-v2 with Apache License 2.0

@Test
public void testGenSqoopCmd_WithLookupShardBy() throws IOException {
    ISource source = SourceManager.getSource(new JdbcSourceAware());
    IMRInput input = source.adaptToBuildEngine(IMRInput.class);
    Assert.assertNotNull(input);

    CubeManager cubeManager = CubeManager.getInstance(getTestConfig());
    CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("ut_jdbc_shard");
    CubeSegment seg = cubeManager.appendSegment(cubeManager.getCube(cubeDesc.getName()),
            new SegmentRange.TSRange(System.currentTimeMillis() - 100L, System.currentTimeMillis() + 100L));
    CubeJoinedFlatTableDesc flatDesc = new CubeJoinedFlatTableDesc(seg);
    JdbcHiveMRInput.JdbcMRBatchCubingInputSide inputSide = (JdbcHiveMRInput.JdbcMRBatchCubingInputSide) input
            .getBatchCubingInputSide(flatDesc);

    AbstractExecutable executable = new MockInputSide(flatDesc, inputSide).createSqoopToFlatHiveStep("/tmp",
            cubeDesc.getName());
    Assert.assertNotNull(executable);
    String cmd = executable.getParam("cmd");
    Assert.assertTrue(cmd.contains("org.h2.Driver"));
    Assert.assertTrue(cmd.contains(
            "--boundary-query \"SELECT MIN(\\\"TEST_CATEGORY_GROUPINGS\\\".\\\"META_CATEG_NAME\\\"), MAX(\\\"TEST_CATEGORY_GROUPINGS\\\".\\\"META_CATEG_NAME\\\")"
                    + System.lineSeparator()
                    + "FROM \\\"DEFAULT\\\".\\\"TEST_CATEGORY_GROUPINGS\\\" AS \\\"TEST_CATEGORY_GROUPINGS\\\"\""));

    source.close();
}
Example #7
Source File: JdbcHiveMRInputTest.java From kylin with Apache License 2.0

@Test
public void testGenSqoopCmd_NoPartition() throws IOException {
    ISource source = SourceManager.getSource(new JdbcSourceAware());
    IMRInput input = source.adaptToBuildEngine(IMRInput.class);
    Assert.assertNotNull(input);

    CubeManager cubeManager = CubeManager.getInstance(getTestConfig());
    CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("ci_left_join_cube");
    CubeSegment seg = cubeManager.appendSegment(cubeManager.getCube(cubeDesc.getName()),
            new SegmentRange.TSRange(0L, Long.MAX_VALUE));
    CubeJoinedFlatTableDesc flatDesc = new CubeJoinedFlatTableDesc(seg);
    JdbcHiveMRInput.JdbcMRBatchCubingInputSide inputSide = (JdbcHiveMRInput.JdbcMRBatchCubingInputSide) input
            .getBatchCubingInputSide(flatDesc);

    AbstractExecutable executable = new MockInputSide(flatDesc, inputSide).createSqoopToFlatHiveStep("/tmp",
            cubeDesc.getName());
    Assert.assertNotNull(executable);
    String cmd = executable.getParam("cmd");
    Assert.assertTrue(cmd.contains("org.h2.Driver"));
    Assert.assertTrue(
            cmd.contains("--boundary-query \"SELECT MIN(\\\"TEST_KYLIN_FACT\\\".\\\"CAL_DT\\\"), MAX(\\\"TEST_KYLIN_FACT\\\".\\\"CAL_DT\\\")"
                    + System.lineSeparator()
                    + "FROM \\\"DEFAULT\\\".\\\"TEST_KYLIN_FACT\\\" AS \\\"TEST_KYLIN_FACT\\\"\""));

    source.close();
}
Example #8
Source File: JdbcHiveMRInputTest.java From kylin-on-parquet-v2 with Apache License 2.0

@Test
public void testGenSqoopCmd_Partition() throws IOException {
    ISource source = SourceManager.getSource(new JdbcSourceAware());
    IMRInput input = source.adaptToBuildEngine(IMRInput.class);
    Assert.assertNotNull(input);

    CubeManager cubeManager = CubeManager.getInstance(getTestConfig());
    CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("ci_inner_join_cube");
    CubeSegment seg = cubeManager.appendSegment(cubeManager.getCube(cubeDesc.getName()),
            new SegmentRange.TSRange(System.currentTimeMillis() - 100L, System.currentTimeMillis() + 100L));
    CubeJoinedFlatTableDesc flatDesc = new CubeJoinedFlatTableDesc(seg);
    JdbcHiveMRInput.JdbcMRBatchCubingInputSide inputSide = (JdbcHiveMRInput.JdbcMRBatchCubingInputSide) input
            .getBatchCubingInputSide(flatDesc);

    AbstractExecutable executable = new MockInputSide(flatDesc, inputSide).createSqoopToFlatHiveStep("/tmp",
            cubeDesc.getName());
    Assert.assertNotNull(executable);
    String cmd = executable.getParam("cmd");
    Assert.assertTrue(cmd.contains("org.h2.Driver"));
    Assert.assertTrue(cmd.contains(
            "--boundary-query \"SELECT MIN(\\\"TEST_KYLIN_FACT\\\".\\\"LEAF_CATEG_ID\\\"), MAX(\\\"TEST_KYLIN_FACT\\\".\\\"LEAF_CATEG_ID\\\")"
                    + System.lineSeparator()
                    + "FROM \\\"DEFAULT\\\".\\\"TEST_KYLIN_FACT\\\" AS \\\"TEST_KYLIN_FACT\\\""));

    source.close();
}
Example #9
Source File: MetadataUpgradeTest.java From Kylin with Apache License 2.0

private void checkCubeDesc(String descName) {
    CubeDescManager cubeDescMgr = CubeDescManager.getInstance(KylinConfig.getInstanceFromEnv());
    CubeDesc cubedesc1 = cubeDescMgr.getCubeDesc(descName);
    Assert.assertNotNull(cubedesc1);
    DataModelDesc model = cubedesc1.getModel();
    Assert.assertNotNull(model);
    Assert.assertTrue(model.getLookups().length > 0);
    List<DimensionDesc> dims = cubedesc1.getDimensions();
    Assert.assertTrue(dims.size() > 0);
    for (DimensionDesc dim : dims) {
        Assert.assertTrue(dim.getColumn().length > 0);
    }
    Assert.assertTrue(cubedesc1.getMeasures().size() > 0);

    CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
    List<CubeInstance> cubes = cubeMgr.getCubesByDesc(descName);
    Assert.assertTrue(cubes.size() > 0);
}
Example #10
Source File: JdbcHiveMRInputTest.java From kylin with Apache License 2.0

@Test
public void testGenSqoopCmd_WithLookupShardBy() throws IOException {
    ISource source = SourceManager.getSource(new JdbcSourceAware());
    IMRInput input = source.adaptToBuildEngine(IMRInput.class);
    Assert.assertNotNull(input);

    CubeManager cubeManager = CubeManager.getInstance(getTestConfig());
    CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("ut_jdbc_shard");
    CubeSegment seg = cubeManager.appendSegment(cubeManager.getCube(cubeDesc.getName()),
            new SegmentRange.TSRange(System.currentTimeMillis() - 100L, System.currentTimeMillis() + 100L));
    CubeJoinedFlatTableDesc flatDesc = new CubeJoinedFlatTableDesc(seg);
    JdbcHiveMRInput.JdbcMRBatchCubingInputSide inputSide = (JdbcHiveMRInput.JdbcMRBatchCubingInputSide) input
            .getBatchCubingInputSide(flatDesc);

    AbstractExecutable executable = new MockInputSide(flatDesc, inputSide).createSqoopToFlatHiveStep("/tmp",
            cubeDesc.getName());
    Assert.assertNotNull(executable);
    String cmd = executable.getParam("cmd");
    Assert.assertTrue(cmd.contains("org.h2.Driver"));
    Assert.assertTrue(cmd.contains(
            "--boundary-query \"SELECT MIN(\\\"TEST_CATEGORY_GROUPINGS\\\".\\\"META_CATEG_NAME\\\"), MAX(\\\"TEST_CATEGORY_GROUPINGS\\\".\\\"META_CATEG_NAME\\\")"
                    + System.lineSeparator()
                    + "FROM \\\"DEFAULT\\\".\\\"TEST_CATEGORY_GROUPINGS\\\" AS \\\"TEST_CATEGORY_GROUPINGS\\\"\""));

    source.close();
}
Example #11
Source File: MergeDictionaryMapper.java From kylin with Apache License 2.0

@Override
protected void doSetup(Context context) throws IOException, InterruptedException {
    super.doSetup(context);

    final SerializableConfiguration sConf = new SerializableConfiguration(context.getConfiguration());
    final String metaUrl = context.getConfiguration().get(BatchConstants.ARG_META_URL);
    final String cubeName = context.getConfiguration().get(BatchConstants.ARG_CUBE_NAME);
    final String segmentIds = context.getConfiguration().get(MergeDictionaryJob.OPTION_MERGE_SEGMENT_IDS.getOpt());

    final KylinConfig kylinConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(sConf, metaUrl);
    final CubeInstance cubeInstance = CubeManager.getInstance(kylinConfig).getCube(cubeName);
    final CubeDesc cubeDesc = CubeDescManager.getInstance(kylinConfig).getCubeDesc(cubeInstance.getDescName());

    mergingSegments = getMergingSegments(cubeInstance, StringUtil.splitByComma(segmentIds));
    tblColRefs = cubeDesc.getAllColumnsNeedDictionaryBuilt().toArray(new TblColRef[0]);
    dictMgr = DictionaryManager.getInstance(kylinConfig);
}
Example #12
Source File: TableSchemaUpdaterTest.java From kylin with Apache License 2.0

@Test
public void testDealWithMappingForCubeDesc() throws IOException {
    CubeDescManager cubeDescManager = CubeDescManager.getInstance(getTestConfig());
    CubeDesc cubeDesc = cubeDescManager.getCubeDesc("ci_left_join_cube");
    CubeDesc updated = TableSchemaUpdater.dealWithMappingForCubeDesc(cubeDesc, mappings);
    updated = reinit(updated, cubeDescManager.CUBE_DESC_SERIALIZER);

    try (DataInputStream bis = new DataInputStream(
            new FileInputStream(new File(mappingRootPath + CubeDesc.concatResourcePath(updated.getName()))))) {
        CubeDesc expected = cubeDescManager.CUBE_DESC_SERIALIZER.deserialize(bis);
        Assert.assertTrue(expected.equalsRaw(updated));
    } catch (Exception e) {
        Assert.fail("CubeDesc is not updated correctly");
    }
}
Example #13
Source File: TopNMeasureTypeTest.java From kylin-on-parquet-v2 with Apache License 2.0

@Test
public void test() {
    CubeDesc desc = CubeDescManager.getInstance(getTestConfig())
            .getCubeDesc("test_kylin_cube_without_slr_left_join_desc");

    MeasureDesc topSellerMeasure = null;
    for (MeasureDesc measureDesc : desc.getMeasures()) {
        if (measureDesc.getName().equals("TOP_SELLER")) {
            topSellerMeasure = measureDesc;
            break;
        }
    }

    TopNMeasureType measureType = (TopNMeasureType) MeasureTypeFactory.create(
            topSellerMeasure.getFunction().getExpression(), topSellerMeasure.getFunction().getReturnDataType());
    topSellerMeasure.getFunction().getConfiguration().clear();

    List<TblColRef> colsNeedDict = measureType.getColumnsNeedDictionary(topSellerMeasure.getFunction());
    assertTrue(colsNeedDict != null && colsNeedDict.size() == 1);

    TblColRef sellerColRef = topSellerMeasure.getFunction().getParameter().getColRefs().get(1);
    topSellerMeasure.getFunction().getConfiguration()
            .put(TopNMeasureType.CONFIG_ENCODING_PREFIX + sellerColRef.getIdentity(), "int:6");
    colsNeedDict = measureType.getColumnsNeedDictionary(topSellerMeasure.getFunction());
    assertTrue(colsNeedDict.size() == 0);
}
Example #14
Source File: JobControllerTest.java From Kylin with Apache License 2.0

@Before
public void setup() throws Exception {
    super.setUp();
    jobSchedulerController = new JobController();
    jobSchedulerController.setJobService(jobService);
    cubeController = new CubeController();
    cubeController.setJobService(jobService);
    cubeController.setCubeService(cubeService);
    KylinConfig testConfig = getTestConfig();
    cubeManager = CubeManager.getInstance(testConfig);
    cubeDescManager = CubeDescManager.getInstance(testConfig);
    executableDAO = ExecutableDao.getInstance(testConfig);
}
Example #15
Source File: CubeSignatureRefresher.java From kylin-on-parquet-v2 with Apache License 2.0

public void update() {
    logger.info("Reloading Cube Metadata from store: "
            + store.getReadableResourcePath(ResourceStore.CUBE_DESC_RESOURCE_ROOT));
    CubeDescManager cubeDescManager = CubeDescManager.getInstance(config);
    List<CubeDesc> cubeDescs;
    if (ArrayUtils.isEmpty(cubeNames)) {
        cubeDescs = cubeDescManager.listAllDesc();
    } else {
        String[] names = StringUtil.splitByComma(cubeNames[0]);
        if (ArrayUtils.isEmpty(names))
            return;
        cubeDescs = Lists.newArrayListWithCapacity(names.length);
        for (String name : names) {
            cubeDescs.add(cubeDescManager.getCubeDesc(name));
        }
    }
    for (CubeDesc cubeDesc : cubeDescs) {
        updateCubeDesc(cubeDesc);
    }

    verify();
}
Example #16
Source File: CubeSignatureRefresher.java From kylin with Apache License 2.0

public void update() {
    logger.info("Reloading Cube Metadata from store: "
            + store.getReadableResourcePath(ResourceStore.CUBE_DESC_RESOURCE_ROOT));
    CubeDescManager cubeDescManager = CubeDescManager.getInstance(config);
    List<CubeDesc> cubeDescs;
    if (ArrayUtils.isEmpty(cubeNames)) {
        cubeDescs = cubeDescManager.listAllDesc();
    } else {
        String[] names = StringUtil.splitByComma(cubeNames[0]);
        if (ArrayUtils.isEmpty(names))
            return;
        cubeDescs = Lists.newArrayListWithCapacity(names.length);
        for (String name : names) {
            cubeDescs.add(cubeDescManager.getCubeDesc(name));
        }
    }
    for (CubeDesc cubeDesc : cubeDescs) {
        updateCubeDesc(cubeDesc);
    }

    verify();
}
Example #17
Source File: TopNMeasureTypeTest.java From kylin with Apache License 2.0

@Test
public void test() {
    CubeDesc desc = CubeDescManager.getInstance(getTestConfig())
            .getCubeDesc("test_kylin_cube_without_slr_left_join_desc");

    MeasureDesc topSellerMeasure = null;
    for (MeasureDesc measureDesc : desc.getMeasures()) {
        if (measureDesc.getName().equals("TOP_SELLER")) {
            topSellerMeasure = measureDesc;
            break;
        }
    }

    TopNMeasureType measureType = (TopNMeasureType) MeasureTypeFactory.create(
            topSellerMeasure.getFunction().getExpression(), topSellerMeasure.getFunction().getReturnDataType());
    topSellerMeasure.getFunction().getConfiguration().clear();

    List<TblColRef> colsNeedDict = measureType.getColumnsNeedDictionary(topSellerMeasure.getFunction());
    assertTrue(colsNeedDict != null && colsNeedDict.size() == 1);

    TblColRef sellerColRef = topSellerMeasure.getFunction().getParameter().getColRefs().get(1);
    topSellerMeasure.getFunction().getConfiguration()
            .put(TopNMeasureType.CONFIG_ENCODING_PREFIX + sellerColRef.getIdentity(), "int:6");
    colsNeedDict = measureType.getColumnsNeedDictionary(topSellerMeasure.getFunction());
    assertTrue(colsNeedDict.size() == 0);
}
Example #18
Source File: ProjectManagerTest.java From Kylin with Apache License 2.0

@Test
public void testExistingProject() throws Exception {
    ProjectManager prjMgr = ProjectManager.getInstance(getTestConfig());
    CubeManager cubeMgr = CubeManager.getInstance(getTestConfig());
    CubeDescManager cubeDescMgr = CubeDescManager.getInstance(getTestConfig());

    int originalProjectCount = prjMgr.listAllProjects().size();
    int originalCubeCount = cubeMgr.listAllCubes().size();

    ResourceStore store = getStore();

    // clean legacy in case last run failed
    store.deleteResource("/cube/new_cube_in_default.json");

    CubeDesc desc = cubeDescMgr.getCubeDesc("test_kylin_cube_with_slr_desc");
    CubeInstance createdCube = cubeMgr.createCube("new_cube_in_default", ProjectInstance.DEFAULT_PROJECT_NAME, desc,
            null);
    assertTrue(createdCube == cubeMgr.getCube("new_cube_in_default"));

    System.out.println(JsonUtil.writeValueAsIndentString(createdCube));

    assertTrue(prjMgr.listAllProjects().size() == originalProjectCount);
    assertTrue(cubeMgr.listAllCubes().size() == originalCubeCount + 1);

    CubeInstance droppedCube = cubeMgr.dropCube("new_cube_in_default", true);

    assertTrue(createdCube == droppedCube);
    assertNull(cubeMgr.getCube("new_cube_in_default"));

    assertTrue(prjMgr.listAllProjects().size() == originalProjectCount);
    assertTrue(cubeMgr.listAllCubes().size() == originalCubeCount);
}
Example #19
Source File: QueryGeneratorCLI.java From kylin with Apache License 2.0

private Pair<List<String>, double[]> run(String cubeName, boolean needToStore) throws Exception {
    CubeDesc cubeDesc = CubeDescManager.getInstance(KylinConfig.getInstanceFromEnv()).getCubeDesc(cubeName);

    // Generate query list
    List<String> queryList = QueryGenerator.generateQueryList(cubeDesc, sizeOfQueryList, maxNumOfDim);

    ProbabilityGeneratorCLI probabilityGeneratorCLI = new ProbabilityGeneratorCLI();
    double[] pCumArray;
    if (needToStore) {
        storeQuery(queryList, outputPath + "/" + cubeName);
        pCumArray = probabilityGeneratorCLI.execute(queryList.size(), outputPath + "/" + cubeName);
    } else {
        pCumArray = probabilityGeneratorCLI.execute(queryList.size());
    }
    return new Pair<>(queryList, pCumArray);
}
Example #20
Source File: CubeSignatureRefresher.java From kylin with Apache License 2.0

private void verify() {
    try {
        Broadcaster.getInstance(config).notifyClearAll();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    DataModelManager.getInstance(config);
    CubeDescManager.getInstance(config);
    CubeManager.getInstance(config);
    ProjectManager.getInstance(config);
}
Example #21
Source File: CubeMetadataUpgrade.java From kylin with Apache License 2.0

public void verify() {
    logger.info("=================================================================");
    logger.info("The changes are applied, now it's time to verify the new metadata store by reloading all metadata:");
    logger.info("=================================================================");
    config.clearManagers();
    DataModelManager.getInstance(config);
    CubeDescManager.getInstance(config);
    CubeManager.getInstance(config);
    ProjectManager.getInstance(config);
    //cleanup();
}
Example #22
Source File: SparkCubingMerge.java From kylin-on-parquet-v2 with Apache License 2.0

private void init() {
    this.kylinConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    final CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(cubeName);
    final CubeDesc cubeDesc = CubeDescManager.getInstance(kylinConfig).getCubeDesc(cube.getDescName());
    final CubeSegment sourceSeg = cube.getSegmentById(sourceSegmentId);
    final CubeSegment mergedSeg = cube.getSegmentById(mergedSegmentId);
    this.segmentReEncoder = new SegmentReEncoder(cubeDesc, sourceSeg, mergedSeg, kylinConfig);
}
Example #23
Source File: KylinLogExtractor.java From kylin with Apache License 2.0

private void beforeExtract() {
    // reload metadata before extracting diagnosis info
    logger.info("Start to reload metadata from diagnosis.");

    config.clearManagers();
    CubeManager.getInstance(config);
    CubeDescManager.getInstance(config);
    DataModelManager.getInstance(config);
    ProjectManager.getInstance(config);
}
Example #24
Source File: CubeMetaIngester.java From kylin with Apache License 2.0

private void injest(File metaRoot) throws IOException {
    KylinConfig srcConfig = KylinConfig.createInstanceFromUri(metaRoot.getAbsolutePath());
    TableMetadataManager srcMetadataManager = TableMetadataManager.getInstance(srcConfig);
    DataModelManager srcModelManager = DataModelManager.getInstance(srcConfig);
    HybridManager srcHybridManager = HybridManager.getInstance(srcConfig);
    CubeManager srcCubeManager = CubeManager.getInstance(srcConfig);
    CubeDescManager srcCubeDescManager = CubeDescManager.getInstance(srcConfig);

    checkAndMark(srcMetadataManager, srcModelManager, srcHybridManager, srcCubeManager, srcCubeDescManager);
    new ResourceTool().copy(srcConfig, kylinConfig, Lists.newArrayList(requiredResources));

    // clear the cache
    Broadcaster.getInstance(kylinConfig).notifyClearAll();

    ProjectManager projectManager = ProjectManager.getInstance(kylinConfig);
    for (TableDesc tableDesc : srcMetadataManager.listAllTables(null)) {
        logger.info("add " + tableDesc + " to " + targetProjectName);
        projectManager.addTableDescToProject(Lists.newArrayList(tableDesc.getIdentity()).toArray(new String[0]),
                targetProjectName);
    }

    for (CubeInstance cube : srcCubeManager.listAllCubes()) {
        logger.info("add " + cube + " to " + targetProjectName);
        projectManager.addModelToProject(cube.getModel().getName(), targetProjectName);
        projectManager.moveRealizationToProject(RealizationType.CUBE, cube.getName(), targetProjectName, null);
    }
}
Example #25
Source File: SrcClusterUtil.java From kylin with Apache License 2.0

public SrcClusterUtil(String configURI, boolean ifJobFSHAEnabled, boolean ifHBaseFSHAEnabled) throws IOException {
    super(configURI, ifJobFSHAEnabled, ifHBaseFSHAEnabled);
    this.hbaseDataDir = hbaseConf.get(hbaseRootDirConfKey) + "/data/default/";

    metadataManager = TableMetadataManager.getInstance(kylinConfig);
    modelManager = DataModelManager.getInstance(kylinConfig);
    projectManager = ProjectManager.getInstance(kylinConfig);
    hybridManager = HybridManager.getInstance(kylinConfig);
    cubeManager = CubeManager.getInstance(kylinConfig);
    cubeDescManager = CubeDescManager.getInstance(kylinConfig);
    realizationRegistry = RealizationRegistry.getInstance(kylinConfig);
    dictionaryManager = DictionaryManager.getInstance(kylinConfig);
    snapshotManager = SnapshotManager.getInstance(kylinConfig);
    extSnapshotInfoManager = ExtTableSnapshotInfoManager.getInstance(kylinConfig);
}
Example #26
Source File: DstClusterUtil.java From kylin with Apache License 2.0

public void saveCubeDesc(CubeDesc cubeDesc) throws IOException {
    if (ifExecute) {
        putMetaResource(CubeDesc.concatResourcePath(cubeDesc.getName()), cubeDesc,
                CubeDescManager.CUBE_DESC_SERIALIZER);
    }
    logger.info("saved cube desc {}", cubeDesc);
}
Example #27
Source File: RealizationCheckTest.java From kylin with Apache License 2.0

@Test
public void testRealizationCheck() {
    RealizationCheck realizationCheck = new RealizationCheck();
    CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("ssb");
    DataModelDesc dataModelDesc = cubeDesc.getModel();
    IRealization iRealization = CubeInstance.create("ssb", cubeDesc);

    realizationCheck.addCubeIncapableReason(iRealization,
            RealizationCheck.IncapableReason.create(RealizationCheck.IncapableType.CUBE_NOT_CONTAIN_ALL_COLUMN));
    realizationCheck.addCubeIncapableReason(iRealization,
            RealizationCheck.IncapableReason.create(RealizationCheck.IncapableType.CUBE_NOT_CONTAIN_ALL_COLUMN));
    Assert.assertTrue(realizationCheck.getCubeIncapableReasons().size() == 1);

    realizationCheck.addModelIncapableReason(dataModelDesc,
            RealizationCheck.IncapableReason.create(RealizationCheck.IncapableType.CUBE_NOT_CONTAIN_ALL_COLUMN));
    realizationCheck.addModelIncapableReason(dataModelDesc,
            RealizationCheck.IncapableReason.create(RealizationCheck.IncapableType.CUBE_NOT_CONTAIN_ALL_MEASURE));
    realizationCheck.addModelIncapableReason(dataModelDesc,
            RealizationCheck.IncapableReason.notContainAllColumn(Lists.<TblColRef> newArrayList()));
    realizationCheck.addModelIncapableReason(dataModelDesc,
            RealizationCheck.IncapableReason.notContainAllColumn(Lists.<TblColRef> newArrayList()));
    Assert.assertTrue(realizationCheck.getModelIncapableReasons().size() == 1);
    Assert.assertTrue(realizationCheck.getModelIncapableReasons().get(dataModelDesc).size() == 3);

    realizationCheck.addModelIncapableReason(dataModelDesc, RealizationCheck.IncapableReason
            .notContainAllColumn(Lists.<TblColRef> newArrayList(dataModelDesc.findColumn("LO_DATE"))));
    Assert.assertTrue(realizationCheck.getModelIncapableReasons().size() == 1);
    Assert.assertTrue(realizationCheck.getModelIncapableReasons().get(dataModelDesc).size() == 4);
}
Example #28
Source File: FlinkCubingMerge.java From kylin with Apache License 2.0

@Override
public void open(Configuration parameters) throws Exception {
    KylinConfig kylinConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(sConf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(kylinConfig)) {
        CubeDesc desc = CubeDescManager.getInstance(kylinConfig).getCubeDesc(cubeName);
        codec = new BufferedMeasureCodec(desc.getMeasures());
    }
}
Example #29
Source File: FlinkCubingMerge.java From kylin-on-parquet-v2 with Apache License 2.0

@Override
public void open(Configuration parameters) throws Exception {
    this.kylinConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    final CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(cubeName);
    final CubeDesc cubeDesc = CubeDescManager.getInstance(kylinConfig).getCubeDesc(cube.getDescName());
    final CubeSegment sourceSeg = cube.getSegmentById(sourceSegmentId);
    final CubeSegment mergedSeg = cube.getSegmentById(mergedSegmentId);
    this.segmentReEncoder = new SegmentReEncoder(cubeDesc, sourceSeg, mergedSeg, kylinConfig);
}
Example #30
Source File: FlinkCubingMerge.java From kylin with Apache License 2.0

@Override
public void open(Configuration parameters) throws Exception {
    this.kylinConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    final CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(cubeName);
    final CubeDesc cubeDesc = CubeDescManager.getInstance(kylinConfig).getCubeDesc(cube.getDescName());
    final CubeSegment sourceSeg = cube.getSegmentById(sourceSegmentId);
    final CubeSegment mergedSeg = cube.getSegmentById(mergedSegmentId);
    this.segmentReEncoder = new SegmentReEncoder(cubeDesc, sourceSeg, mergedSeg, kylinConfig);
}