Merged
Changes from all commits
@@ -76,6 +76,7 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
import java.util.stream.Collectors;


@@ -111,7 +112,7 @@ public enum BackupJobState {
private Map<Long, Long> unfinishedTaskIds = Maps.newConcurrentMap();
// tablet id -> snapshot info
@SerializedName("si")
-private Map<Long, SnapshotInfo> snapshotInfos = Maps.newConcurrentMap();
+private ConcurrentMap<Long, SnapshotInfo> snapshotInfos = Maps.newConcurrentMap();
// save all related table[partition] info
@SerializedName("meta")
private BackupMeta backupMeta;
@@ -136,7 +137,7 @@ public enum BackupJobState {

// Record partition IDs that were dropped during backup (tableId -> set of partitionIds)
@SerializedName("dp")
-private Map<Long, Set<Long>> droppedPartitionsByTable = Maps.newConcurrentMap();
+private ConcurrentMap<Long, Set<Long>> droppedPartitionsByTable = Maps.newConcurrentMap();

private long commitSeq = 0;

@@ -29,7 +29,6 @@
import org.apache.doris.mysql.privilege.Auth;
import org.apache.doris.nereids.trees.plans.commands.AlterSqlBlockRuleCommand;
import org.apache.doris.nereids.trees.plans.commands.CreateSqlBlockRuleCommand;
-import org.apache.doris.persist.gson.GsonPostProcessable;
import org.apache.doris.persist.gson.GsonUtils;
import org.apache.doris.qe.ConnectContext;

@@ -46,20 +45,21 @@
import java.io.IOException;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
* Manage SqlBlockRule.
**/
-public class SqlBlockRuleMgr implements Writable, GsonPostProcessable {
+public class SqlBlockRuleMgr implements Writable {
private static final Logger LOG = LogManager.getLogger(SqlBlockRuleMgr.class);

private ReentrantReadWriteLock lock = new ReentrantReadWriteLock(true);

@SerializedName(value = "nameToSqlBlockRuleMap")
-private Map<String, SqlBlockRule> nameToSqlBlockRuleMap = Maps.newConcurrentMap();
+private ConcurrentMap<String, SqlBlockRule> nameToSqlBlockRuleMap = Maps.newConcurrentMap();

private void writeLock() {
lock.writeLock().lock();
@@ -327,13 +327,4 @@ public static SqlBlockRuleMgr read(DataInput in) throws IOException {
String json = Text.readString(in);
return GsonUtils.GSON.fromJson(json, SqlBlockRuleMgr.class);
}

-@Override
-public void gsonPostProcess() throws IOException {
-Map<String, SqlBlockRule> nameToSqlBlockRuleMapNew = Maps.newConcurrentMap();
-if (this.nameToSqlBlockRuleMap != null) {
-nameToSqlBlockRuleMapNew.putAll(this.nameToSqlBlockRuleMap);
-}
-this.nameToSqlBlockRuleMap = nameToSqlBlockRuleMapNew;
-}
}
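The deleted gsonPostProcess above was only compensating for how Gson builds maps: Gson picks the Map implementation to instantiate from the declared type of the field. A field declared as plain Map comes back as a non-thread-safe map (LinkedTreeMap or LinkedHashMap, depending on the key type), which the old post-process step papered over by copying everything into a fresh concurrent map; a field declared as ConcurrentMap is instantiated as a ConcurrentHashMap directly, so nothing needs to be rebuilt. The sketch below is a standalone check of that Gson behavior, not Doris code; the holder classes and field names are invented, and it assumes no custom TypeAdapter overrides map creation for the field.

```java
import com.google.gson.Gson;

import java.util.Map;
import java.util.concurrent.ConcurrentMap;

// Standalone check: Gson chooses the map implementation from the declared field type.
public class GsonMapTypeCheck {
    static class PlainHolder {
        Map<Long, String> m;            // deserialized into a non-thread-safe LinkedHashMap
    }

    static class ConcurrentHolder {
        ConcurrentMap<Long, String> m;  // deserialized into a java.util.concurrent.ConcurrentHashMap
    }

    public static void main(String[] args) {
        Gson gson = new Gson();
        String json = "{\"m\":{\"1\":\"a\"}}";
        System.out.println(gson.fromJson(json, PlainHolder.class).m.getClass().getName());
        System.out.println(gson.fromJson(json, ConcurrentHolder.class).m.getClass().getName());
    }
}
```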
@@ -57,6 +57,7 @@
import java.util.Map;
import java.util.Optional;
import java.util.Set;
+import java.util.concurrent.ConcurrentMap;
import java.util.stream.Collectors;

/**
@@ -163,7 +164,7 @@ public static boolean isGlobalGroupName(String groupName) {
private Multimap<GroupId, Long> group2Tables = ArrayListMultimap.create();
// table_id -> group_id
@SerializedName(value = "table2Group")
-private Map<Long, GroupId> table2Group = Maps.newConcurrentMap();
+private ConcurrentMap<Long, GroupId> table2Group = Maps.newConcurrentMap();
// group id -> group schema
@SerializedName(value = "group2Schema")
private Map<GroupId, ColocateGroupSchema> group2Schema = Maps.newHashMap();
@@ -49,6 +49,7 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.concurrent.ConcurrentMap;
import java.util.stream.Collectors;

/**
@@ -65,7 +66,7 @@ public class ResourceMgr implements Writable {

// { resourceName -> Resource}
@SerializedName(value = "nameToResource")
-private final Map<String, Resource> nameToResource = Maps.newConcurrentMap();
+private final ConcurrentMap<String, Resource> nameToResource = Maps.newConcurrentMap();
private final ResourceProcNode procNode = new ResourceProcNode();

public ResourceMgr() {
@@ -54,7 +54,8 @@ public class CloudReplica extends Replica {
// In the future, a replica may be mapped to multiple BEs in a cluster,
// so this value is be list
@SerializedName(value = "bes")
-private Map<String, List<Long>> primaryClusterToBackends = new ConcurrentHashMap<String, List<Long>>();
+private ConcurrentHashMap<String, List<Long>> primaryClusterToBackends
+        = new ConcurrentHashMap<String, List<Long>>();
@SerializedName(value = "dbId")
private long dbId = -1;
@SerializedName(value = "tableId")
@@ -50,6 +50,7 @@
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
+import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -92,13 +93,13 @@ public class RuntimeProfile {
private transient ReentrantReadWriteLock infoStringsLock = new ReentrantReadWriteLock();

@SerializedName(value = "counterMap")
-private Map<String, Counter> counterMap = Maps.newConcurrentMap();
+private ConcurrentMap<String, Counter> counterMap = Maps.newConcurrentMap();
@SerializedName(value = "childCounterMap")
-private Map<String, TreeSet<String>> childCounterMap = Maps.newConcurrentMap();
+private ConcurrentMap<String, TreeSet<String>> childCounterMap = Maps.newConcurrentMap();
// protect TreeSet in ChildCounterMap
private transient ReentrantReadWriteLock counterLock = new ReentrantReadWriteLock();
@SerializedName(value = "childMap")
-private Map<String, RuntimeProfile> childMap = Maps.newConcurrentMap();
+private ConcurrentMap<String, RuntimeProfile> childMap = Maps.newConcurrentMap();
@SerializedName(value = "childList")
private LinkedList<Pair<RuntimeProfile, Boolean>> childList = Lists.newLinkedList();
private transient ReentrantReadWriteLock childLock = new ReentrantReadWriteLock();
@@ -65,6 +65,7 @@
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
+import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;
import java.util.stream.Collectors;

@@ -85,7 +86,8 @@ public class CatalogMgr implements Writable, GsonPostProcessable {
private final MonitoredReentrantReadWriteLock lock = new MonitoredReentrantReadWriteLock(true);

@SerializedName(value = "idToCatalog")
-private Map<Long, CatalogIf<? extends DatabaseIf<? extends TableIf>>> idToCatalog = Maps.newConcurrentMap();
+private ConcurrentMap<Long, CatalogIf<? extends DatabaseIf<? extends TableIf>>> idToCatalog
+        = Maps.newConcurrentMap();
// this map will be regenerated from idToCatalog, so not need to persist.
private Map<String, CatalogIf> nameToCatalog = Maps.newConcurrentMap();

@@ -777,16 +779,11 @@ public void write(DataOutput out) throws IOException {

@Override
public void gsonPostProcess() throws IOException {
-// After deserializing from Gson, the concurrent map may become a normal map.
-// So here we reconstruct the concurrent map.
-Map<Long, CatalogIf<? extends DatabaseIf<? extends TableIf>>> newIdToCatalog = Maps.newConcurrentMap();
Map<String, CatalogIf> newNameToCatalog = Maps.newConcurrentMap();
for (CatalogIf catalog : idToCatalog.values()) {
newNameToCatalog.put(catalog.getName(), catalog);
-newIdToCatalog.put(catalog.getId(), catalog);
// ATTN: can not call catalog.getProperties() here, because ResourceMgr is not replayed yet.
}
-this.idToCatalog = newIdToCatalog;
this.nameToCatalog = newNameToCatalog;
internalCatalog = (InternalCatalog) idToCatalog.get(InternalCatalog.INTERNAL_CATALOG_ID);
}
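Note on the hunk above: unlike SqlBlockRuleMgr, CatalogMgr keeps its gsonPostProcess and only drops the part that rebuilt idToCatalog. nameToCatalog carries no @SerializedName (per the comment in the earlier hunk, it is regenerated from idToCatalog rather than persisted), so it still has to be reconstructed after deserialization; only the copy into a fresh concurrent idToCatalog becomes redundant now that the field's declared type already gives Gson a ConcurrentHashMap to fill.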
@@ -69,6 +69,7 @@
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
@@ -91,12 +92,12 @@ public class DictionaryManager extends MasterDaemon implements Writable {

// Map of database name -> dictionary name -> dictionary id
@SerializedName(value = "ids")
-private Map<String, Map<String, Long>> dictionaryIds = Maps.newConcurrentMap();
+private ConcurrentMap<String, ConcurrentMap<String, Long>> dictionaryIds = Maps.newConcurrentMap();
// dbname -> tablename -> dict id
@SerializedName(value = "t")
-private Map<String, ListMultimap<String, Long>> dbTableToDicIds = Maps.newConcurrentMap();
+private ConcurrentMap<String, ListMultimap<String, Long>> dbTableToDicIds = Maps.newConcurrentMap();
@SerializedName(value = "idmap")
-private Map<Long, Dictionary> idToDictionary = Maps.newConcurrentMap();
+private ConcurrentMap<Long, Dictionary> idToDictionary = Maps.newConcurrentMap();

@SerializedName(value = "i")
private long uniqueId = 0;
@@ -46,6 +46,7 @@
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
+import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

@@ -55,19 +56,19 @@ public class InsertOverwriteManager extends MasterDaemon implements Writable {
private static final long CLEAN_INTERVAL_SECOND = 10;

@SerializedName(value = "tasks")
-private Map<Long, InsertOverwriteTask> tasks = Maps.newConcurrentMap();
+private ConcurrentMap<Long, InsertOverwriteTask> tasks = Maps.newConcurrentMap();

// <txnId, <dbId, tableId>>
// for iot auto detect tasks. a txn will make many task by different rpc
@SerializedName(value = "taskGroups")
-private Map<Long, List<Long>> taskGroups = Maps.newConcurrentMap();
+private ConcurrentMap<Long, List<Long>> taskGroups = Maps.newConcurrentMap();
// for one task group, there may be different requests about changing a partition to new.
// but we only change one time and save the relations in partitionPairs. they're protected by taskLocks
@SerializedName(value = "taskLocks")
-private Map<Long, ReentrantLock> taskLocks = Maps.newConcurrentMap();
+private ConcurrentMap<Long, ReentrantLock> taskLocks = Maps.newConcurrentMap();
// <groupId, <oldPartId, newPartId>>. no need concern which task it belongs to.
@SerializedName(value = "partitionPairs")
-private Map<Long, Map<Long, Long>> partitionPairs = Maps.newConcurrentMap();
+private ConcurrentMap<Long, Map<Long, Long>> partitionPairs = Maps.newConcurrentMap();

// TableId running insert overwrite
// dbId ==> Set<tableId>
@@ -54,6 +54,7 @@
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID;
+import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;

public class DeleteHandler implements Writable {
@@ -64,7 +65,7 @@ public class DeleteHandler implements Writable {

// Db -> DeleteInfo list
@SerializedName(value = "dbToDeleteInfos")
-private final Map<Long, List<DeleteInfo>> dbToDeleteInfos;
+private final ConcurrentMap<Long, List<DeleteInfo>> dbToDeleteInfos;

private final ReentrantReadWriteLock lock;

@@ -34,21 +34,22 @@
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
+import java.util.concurrent.ConcurrentMap;

public class MTMVRefreshPartitionSnapshot {
private static final Logger LOG = LogManager.getLogger(MTMV.class);
// old version only support one pct table
@Deprecated
@SerializedName("p")
-private Map<String, MTMVSnapshotIf> partitions;
+private ConcurrentMap<String, MTMVSnapshotIf> partitions;
@SerializedName("pcts")
-private Map<BaseTableInfo, Map<String, MTMVSnapshotIf>> pcts;
+private ConcurrentMap<BaseTableInfo, Map<String, MTMVSnapshotIf>> pcts;
// old version only persist table id, we need `BaseTableInfo`, `tables` only for compatible old version
@SerializedName("t")
@Deprecated
-private Map<Long, MTMVSnapshotIf> tables;
+private ConcurrentMap<Long, MTMVSnapshotIf> tables;
@SerializedName("ti")
-private Map<BaseTableInfo, MTMVSnapshotIf> tablesInfo;
+private ConcurrentMap<BaseTableInfo, MTMVSnapshotIf> tablesInfo;

public MTMVRefreshPartitionSnapshot() {
this.partitions = Maps.newConcurrentMap();
@@ -27,10 +27,11 @@
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
+import java.util.concurrent.ConcurrentMap;

public class MTMVRefreshSnapshot {
@SerializedName("ps")
-private Map<String, MTMVRefreshPartitionSnapshot> partitionSnapshots;
+private ConcurrentMap<String, MTMVRefreshPartitionSnapshot> partitionSnapshots;

public MTMVRefreshSnapshot() {
this.partitionSnapshots = Maps.newConcurrentMap();
@@ -33,11 +33,11 @@
import java.io.DataOutput;
import java.io.IOException;
import java.util.List;
-import java.util.Map;
+import java.util.concurrent.ConcurrentMap;

public class PasswordPolicyManager implements Writable {
@SerializedName(value = "policyMap")
-private Map<UserIdentity, PasswordPolicy> policyMap = Maps.newConcurrentMap();
+private ConcurrentMap<UserIdentity, PasswordPolicy> policyMap = Maps.newConcurrentMap();

public PasswordPolicyManager() {

@@ -48,6 +48,7 @@
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
+import java.util.concurrent.ConcurrentMap;

public class Role implements GsonPostProcessable {
private static final Logger LOG = LogManager.getLogger(Role.class);
@@ -80,17 +81,17 @@ public class Role implements GsonPostProcessable {
private String comment;
// Will be persisted
@SerializedName(value = "tblPatternToPrivs")
-private Map<TablePattern, PrivBitSet> tblPatternToPrivs = Maps.newConcurrentMap();
+private ConcurrentMap<TablePattern, PrivBitSet> tblPatternToPrivs = Maps.newConcurrentMap();
@SerializedName(value = "resourcePatternToPrivs")
-private Map<ResourcePattern, PrivBitSet> resourcePatternToPrivs = Maps.newConcurrentMap();
+private ConcurrentMap<ResourcePattern, PrivBitSet> resourcePatternToPrivs = Maps.newConcurrentMap();
@SerializedName(value = "storageVaultPatternToPrivs")
-private Map<ResourcePattern, PrivBitSet> storageVaultPatternToPrivs = Maps.newConcurrentMap();
+private ConcurrentMap<ResourcePattern, PrivBitSet> storageVaultPatternToPrivs = Maps.newConcurrentMap();
@SerializedName(value = "clusterPatternToPrivs")
-private Map<ResourcePattern, PrivBitSet> clusterPatternToPrivs = Maps.newConcurrentMap();
+private ConcurrentMap<ResourcePattern, PrivBitSet> clusterPatternToPrivs = Maps.newConcurrentMap();
@SerializedName(value = "stagePatternToPrivs")
-private Map<ResourcePattern, PrivBitSet> stagePatternToPrivs = Maps.newConcurrentMap();
+private ConcurrentMap<ResourcePattern, PrivBitSet> stagePatternToPrivs = Maps.newConcurrentMap();
@SerializedName(value = "workloadGroupPatternToPrivs")
-private Map<WorkloadGroupPattern, PrivBitSet> workloadGroupPatternToPrivs = Maps.newConcurrentMap();
+private ConcurrentMap<WorkloadGroupPattern, PrivBitSet> workloadGroupPatternToPrivs = Maps.newConcurrentMap();
@SerializedName(value = "colPrivMap")
private Map<ColPrivilegeKey, Set<String>> colPrivMap = Maps.newHashMap();

@@ -56,6 +56,7 @@
import java.util.Map;
import java.util.Optional;
import java.util.Set;
+import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors;

@@ -68,7 +69,7 @@ public class PolicyMgr implements Writable {
private ReentrantReadWriteLock lock = new ReentrantReadWriteLock(true);

@SerializedName(value = "typeToPolicyMap")
-private Map<PolicyTypeEnum, List<Policy>> typeToPolicyMap = Maps.newConcurrentMap();
+private ConcurrentMap<PolicyTypeEnum, List<Policy>> typeToPolicyMap = Maps.newConcurrentMap();

// ctlName -> dbName -> tableName -> List<RowPolicy>
private Map<String, Map<String, Map<String, List<RowPolicy>>>> tablePolicies = Maps.newConcurrentMap();
@@ -60,14 +60,15 @@
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Set;
+import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;

public class WorkloadSchedPolicyMgr extends MasterDaemon implements Writable, GsonPostProcessable {

private static final Logger LOG = LogManager.getLogger(WorkloadSchedPolicyMgr.class);

@SerializedName(value = "idToPolicy")
-private Map<Long, WorkloadSchedPolicy> idToPolicy = Maps.newConcurrentMap();
+private ConcurrentMap<Long, WorkloadSchedPolicy> idToPolicy = Maps.newConcurrentMap();
private Map<String, WorkloadSchedPolicy> nameToPolicy = Maps.newHashMap();

private PolicyProcNode policyProcNode = new PolicyProcNode();
Expand Down
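All of these fields are created with Maps.newConcurrentMap() precisely so they stay safe to read and mutate from multiple FE threads (for example while the in-memory state is being iterated for serialization), so they need to remain concurrent after a Gson replay just as much as before it. As a standalone reminder of what breaks when such a field silently degrades to a plain map on deserialization (not Doris code; the class and method names are invented), a structural modification during iteration fails fast on HashMap but is tolerated by ConcurrentHashMap's weakly consistent iterators:

```java
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Standalone illustration: mutate a map through the map (not the iterator) while iterating it.
public class ConcurrentIterationDemo {
    public static void main(String[] args) {
        System.out.println("HashMap survives: " + survivesMutationDuringIteration(new HashMap<>()));
        System.out.println("ConcurrentHashMap survives: " + survivesMutationDuringIteration(new ConcurrentHashMap<>()));
    }

    static boolean survivesMutationDuringIteration(Map<Integer, Integer> map) {
        for (int i = 0; i < 100; i++) {
            map.put(i, i);
        }
        try {
            for (Integer key : map.keySet()) {
                map.remove(key);    // structural modification while an iterator is open
            }
            return true;            // ConcurrentHashMap: weakly consistent iterator, no exception
        } catch (ConcurrentModificationException e) {
            return false;           // HashMap: fail-fast iterator throws
        }
    }
}
```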