[INLONG-9147][Agent] Add log file source and source related modification #9153

Reader.java (org.apache.inlong.agent.plugin.file)
@@ -0,0 +1,83 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.inlong.agent.plugin.file;

import org.apache.inlong.agent.conf.InstanceProfile;
import org.apache.inlong.agent.plugin.Message;

/**
* A Reader reads data and reports whether the reading action is finished. It is called at
* the Task level.
*/
public interface Reader {

/**
* Read message
*
* @return message
*/
Message read();

/**
* Whether reading is finished
*/
boolean isFinished();

/**
* Return the reader's reading source name
*/
String getReadSource();

/**
* Set the read timeout in milliseconds
*/
void setReadTimeout(long mill);

/**
* Set the wait interval in milliseconds used when tailing a file,
* so that the polling loop does not consume too much CPU
*/
void setWaitMillisecond(long millis);

/**
* Get a snapshot of the reader's read position
*/
String getSnapshot();

/**
* Finish reading
*/
void finishRead();

/**
* Whether the source exists
*/
boolean isSourceExist();

/**
* Initialize the reader
*
* @param profile instance profile
*/
void init(InstanceProfile profile);

/**
* Destroy the reader
*/
void destroy();
}
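
For context, a minimal sketch of how an implementation might satisfy this contract (hypothetical, not part of this PR): a reader that tails a plain text file line by line. DefaultMessage, the "file.path" key, and the get(key, default) accessor on InstanceProfile are assumptions made for illustration only.

import org.apache.inlong.agent.conf.InstanceProfile;
import org.apache.inlong.agent.message.DefaultMessage;
import org.apache.inlong.agent.plugin.Message;
import org.apache.inlong.agent.plugin.file.Reader;

import java.io.BufferedReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

// Hypothetical example only: a minimal Reader that tails a plain text file line by line.
public class SimpleFileReader implements Reader {

    private Path file;
    private BufferedReader in;
    private long waitMs = 100; // back-off between empty reads, see setWaitMillisecond
    private long lineCount;
    private volatile boolean finished;

    @Override
    public void init(InstanceProfile profile) {
        // "file.path" is a made-up key used only for illustration
        file = Paths.get(profile.get("file.path", ""));
        try {
            in = Files.newBufferedReader(file, StandardCharsets.UTF_8);
        } catch (IOException e) {
            throw new IllegalStateException("cannot open " + file, e);
        }
    }

    @Override
    public Message read() {
        try {
            String line = in.readLine();
            if (line == null) {
                // nothing new yet: back off instead of spinning in a tight loop
                Thread.sleep(waitMs);
                return null;
            }
            lineCount++;
            return new DefaultMessage(line.getBytes(StandardCharsets.UTF_8));
        } catch (IOException e) {
            return null;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return null;
        }
    }

    @Override
    public boolean isFinished() {
        return finished;
    }

    @Override
    public String getReadSource() {
        return file.toString();
    }

    @Override
    public void setReadTimeout(long mill) {
        // not used in this sketch; a real reader would stop blocking reads after this timeout
    }

    @Override
    public void setWaitMillisecond(long millis) {
        this.waitMs = millis;
    }

    @Override
    public String getSnapshot() {
        return String.valueOf(lineCount); // simple progress marker: lines read so far
    }

    @Override
    public void finishRead() {
        finished = true;
    }

    @Override
    public boolean isSourceExist() {
        return Files.exists(file);
    }

    @Override
    public void destroy() {
        try {
            if (in != null) {
                in.close();
            }
        } catch (IOException ignored) {
            // best-effort close
        }
    }
}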
Source.java (org.apache.inlong.agent.plugin.file)
@@ -0,0 +1,61 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.inlong.agent.plugin.file;

import org.apache.inlong.agent.conf.InstanceProfile;
import org.apache.inlong.agent.conf.TaskProfile;
import org.apache.inlong.agent.plugin.Message;

import java.util.List;

/**
* A Source can be split into multiple readers.
*/
public interface Source {

/**
* Split source into a list of readers.
*
* @param conf task profile
* @return list of readers
*/
List<Reader> split(TaskProfile conf);

/**
* Read message
*
* @return message
*/
Message read();

/**
* Initialize the source
*
* @param profile instance profile
*/
void init(InstanceProfile profile);

/**
* Destroy the source
*/
void destroy();

/**
* Whether the source has finished reading
*/
boolean sourceFinish();

/**
* Whether the source exists
*/
boolean sourceExist();
}
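
To make the contract concrete, a hedged sketch of how a task-level caller might drive a Source: split the task into readers, then poll each reader until it reports completion. SourceDriver is a hypothetical helper; it assumes split() hands back readers that are ready to read from.

import org.apache.inlong.agent.conf.TaskProfile;
import org.apache.inlong.agent.plugin.Message;
import org.apache.inlong.agent.plugin.file.Reader;
import org.apache.inlong.agent.plugin.file.Source;

import java.util.List;

// Hypothetical helper, not part of this PR.
public final class SourceDriver {

    // Split the task into readers and poll each one until it reports completion.
    public static void drain(Source source, TaskProfile taskConf) {
        List<Reader> readers = source.split(taskConf);
        if (readers == null) {
            return; // some sources return null when the task has nothing to read
        }
        for (Reader reader : readers) {
            while (!reader.isFinished()) {
                Message message = reader.read();
                if (message == null) {
                    continue; // the reader backs off internally, see setWaitMillisecond
                }
                // hand the message to the downstream channel/sink here
            }
            reader.destroy();
        }
        source.destroy();
    }

    private SourceDriver() {
    }
}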
SQL source (org.apache.inlong.agent.plugin.sources)
@@ -17,8 +17,10 @@

package org.apache.inlong.agent.plugin.sources;

import org.apache.inlong.agent.conf.JobProfile;
import org.apache.inlong.agent.plugin.Reader;
import org.apache.inlong.agent.conf.TaskProfile;
import org.apache.inlong.agent.plugin.Message;
import org.apache.inlong.agent.plugin.file.Reader;
import org.apache.inlong.agent.plugin.sources.file.AbstractSource;
import org.apache.inlong.agent.plugin.sources.reader.SqlReader;
import org.apache.inlong.agent.utils.AgentDbUtils;

@@ -69,8 +71,7 @@ private List<Reader> splitSqlJob(String sqlPattern) {
* @return reader list or null if database type is not correct.
*/
@Override
public List<Reader> split(JobProfile conf) {
super.init(conf);
public List<Reader> split(TaskProfile conf) {
String sqlPattern = conf.get(JOB_DATABASE_SQL, "").toLowerCase();
List<Reader> readerList = null;
if (!sqlPattern.isEmpty()) {
@@ -86,4 +87,19 @@ public List<Reader> split(JobProfile conf) {
}
return readerList;
}

@Override
public Message read() {
return null;
}

@Override
public boolean sourceFinish() {
return false;
}

@Override
public boolean sourceExist() {
return false;
}
}
KafkaSource.java (org.apache.inlong.agent.plugin.sources)
@@ -17,8 +17,10 @@

package org.apache.inlong.agent.plugin.sources;

import org.apache.inlong.agent.conf.JobProfile;
import org.apache.inlong.agent.plugin.Reader;
import org.apache.inlong.agent.conf.TaskProfile;
import org.apache.inlong.agent.plugin.Message;
import org.apache.inlong.agent.plugin.file.Reader;
import org.apache.inlong.agent.plugin.sources.file.AbstractSource;
import org.apache.inlong.agent.plugin.sources.reader.KafkaReader;

import com.google.gson.Gson;
@@ -37,16 +39,16 @@
import java.util.Properties;
import java.util.concurrent.atomic.AtomicLong;

import static org.apache.inlong.agent.constant.JobConstants.DEFAULT_JOB_LINE_FILTER;
import static org.apache.inlong.agent.constant.JobConstants.JOB_ID;
import static org.apache.inlong.agent.constant.JobConstants.JOB_KAFKA_AUTO_COMMIT_OFFSET_RESET;
import static org.apache.inlong.agent.constant.JobConstants.JOB_KAFKA_BOOTSTRAP_SERVERS;
import static org.apache.inlong.agent.constant.JobConstants.JOB_KAFKA_GROUP_ID;
import static org.apache.inlong.agent.constant.JobConstants.JOB_KAFKA_OFFSET;
import static org.apache.inlong.agent.constant.JobConstants.JOB_KAFKA_PARTITION_OFFSET_DELIMITER;
import static org.apache.inlong.agent.constant.JobConstants.JOB_KAFKA_TOPIC;
import static org.apache.inlong.agent.constant.JobConstants.JOB_LINE_FILTER_PATTERN;
import static org.apache.inlong.agent.constant.JobConstants.JOB_OFFSET_DELIMITER;
import static org.apache.inlong.agent.constant.TaskConstants.DEFAULT_JOB_LINE_FILTER;
import static org.apache.inlong.agent.constant.TaskConstants.JOB_KAFKA_AUTO_COMMIT_OFFSET_RESET;
import static org.apache.inlong.agent.constant.TaskConstants.JOB_KAFKA_BOOTSTRAP_SERVERS;
import static org.apache.inlong.agent.constant.TaskConstants.JOB_KAFKA_GROUP_ID;
import static org.apache.inlong.agent.constant.TaskConstants.JOB_KAFKA_OFFSET;
import static org.apache.inlong.agent.constant.TaskConstants.JOB_KAFKA_PARTITION_OFFSET_DELIMITER;
import static org.apache.inlong.agent.constant.TaskConstants.JOB_KAFKA_TOPIC;
import static org.apache.inlong.agent.constant.TaskConstants.JOB_LINE_FILTER_PATTERN;
import static org.apache.inlong.agent.constant.TaskConstants.JOB_OFFSET_DELIMITER;
import static org.apache.inlong.agent.constant.TaskConstants.TASK_ID;

/**
* Kafka source, splits a Kafka source job into multiple readers
@@ -70,8 +72,7 @@ public KafkaSource() {
}

@Override
public List<Reader> split(JobProfile conf) {
super.init(conf);
public List<Reader> split(TaskProfile conf) {
List<Reader> result = new ArrayList<>();
String filterPattern = conf.get(JOB_LINE_FILTER_PATTERN, DEFAULT_JOB_LINE_FILTER);

@@ -103,7 +104,7 @@ public List<Reader> split(JobProfile conf) {
for (PartitionInfo partitionInfo : partitionInfoList) {
props.put(JOB_KAFKA_GROUP_ID.replace(JOB_KAFKAJOB_PARAM_PREFIX, StringUtils.EMPTY),
map.getOrDefault(JOB_KAFKA_GROUP_ID,
map.get(JOB_ID) + JOB_OFFSET_DELIMITER
map.get(TASK_ID) + JOB_OFFSET_DELIMITER
+ "group" + partitionInfo.partition()));
KafkaConsumer<String, byte[]> partitonConsumer = new KafkaConsumer<>(props);
partitonConsumer.assign(Collections.singletonList(
@@ -134,6 +135,21 @@ public List<Reader> split(JobProfile conf) {
return result;
}

@Override
public Message read() {
return null;
}

@Override
public boolean sourceFinish() {
return false;
}

@Override
public boolean sourceExist() {
return false;
}

private void addValidator(String filterPattern, KafkaReader kafkaReader) {
kafkaReader.addPatternValidator(filterPattern);
}
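
The split above builds one consumer per partition, pins it to that partition with assign(), and gives it its own consumer group derived from the task id. Below is a standalone sketch of that pattern using only the plain Kafka client API; the bootstrap server value, the class and method names, and the "_" delimiter are placeholder assumptions, not agent code.

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.Collections;
import java.util.Properties;

// Standalone illustration of the per-partition consumer pattern, not agent code.
public class PerPartitionConsumerSketch {

    public static KafkaConsumer<String, byte[]> consumerFor(String taskId, String topic, int partition) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder
        // One consumer group per partition, derived from the task id; this mirrors
        // the TASK_ID + delimiter + "group" + partition naming used in the diff.
        props.put(ConsumerConfig.GROUP_ID_CONFIG, taskId + "_group" + partition);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());

        KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<>(props);
        // assign() pins the consumer to exactly one partition, so there is no group rebalancing.
        consumer.assign(Collections.singletonList(new TopicPartition(topic, partition)));
        return consumer;
    }
}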