/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.ide.common.res2;

import com.android.annotations.NonNull;
import com.android.annotations.Nullable;
import com.android.ide.common.blame.Message;
import com.android.utils.ILogger;
import com.google.common.base.Objects;
import com.google.common.base.Splitter;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import java.io.File;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;

/**
 * Represents a set of {@link DataItem}s.
 *
 * The items can come from multiple source folders, and duplicates are detected.
 *
 * Each source folder is considered to be at the same level. To use overlays, a
 * {@link DataMerger} must be used.
 *
 * Creating the set and adding folders does not load the data.
 * The data can be loaded from the files, or from a blob which is generated by the set itself.
 *
 * Upon loading the data from the blob, the data can be updated with fresher files. Each item
 * that is updated is flagged as such, in order to manage incremental updates.
 *
 * Writing/Loading the blob is not done through this class directly, but instead through the
 * {@link DataMerger} which contains DataSet objects.
 */
abstract class DataSet<I extends DataItem<F>, F extends DataFile<I>>
        implements SourceSet, DataMap<I> {

    static final String NODE_SOURCE = "source";
    static final String NODE_FILE = "file";
    static final String ATTR_CONFIG = "config";
    static final String ATTR_PATH = "path";
    static final String ATTR_NAME = "name";

    private final String mConfigName;
    private final boolean mValidateEnabled;

    /**
     * List of source files. They may not have been loaded yet.
     */
    private final List<File> mSourceFiles = Lists.newArrayList();

    /**
     * The key is the {@link DataItem#getKey()}.
     * This is a multimap to support moving a data item from one file to another (values file)
     * during incremental update.
     */
    private final ListMultimap<String, I> mItems = ArrayListMultimap.create();

    /**
     * Map of source files to DataFiles. This is a multimap because the key is the source
     * file/folder, not the File for the DataFile itself.
     */
    private final ListMultimap<File, F> mSourceFileToDataFilesMap = ArrayListMultimap.create();

    /**
     * Map from a File to its DataFile.
     */
    private final Map<File, F> mDataFileMap = Maps.newHashMap();
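
    /*
     * Illustrative sketch, not part of the implementation: a typical lifecycle for a
     * concrete subclass, using a hypothetical "ExampleSet extends DataSet<ExampleItem,
     * ExampleFile>" and hypothetical paths. The set is created, source folders are added,
     * and the data is then loaded either from the files themselves or from a previously
     * written blob (the latter via DataMerger, not directly through this class):
     *
     *     ExampleSet set = new ExampleSet("main", true);   // true = validateEnabled
     *     set.addSource(new File("/project/src/main/res")); // hypothetical source folder
     *     set.loadFromFiles(logger);                        // parses the folders, checks duplicates
     *     ListMultimap<String, ExampleItem> items = set.getDataMap();
     */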

    /**
     * Creates a DataSet with a given configName. The name is used to identify the set
     * across sessions.
     *
     * @param configName the name of the config this set is associated with.
     */
    public DataSet(String configName, boolean validateEnabled) {
        mConfigName = configName;
        mValidateEnabled = validateEnabled;
    }

    protected abstract DataSet<I, F> createSet(String name);

    /**
     * Creates a DataFile and associated DataItems from an XML node from a file created with
     * {@link DataSet#appendToXml(org.w3c.dom.Node, org.w3c.dom.Document, MergeConsumer)}
     *
     * @param file the file represented by the DataFile
     * @param fileNode the XML node.
     * @return a DataFile
     */
    protected abstract F createFileAndItemsFromXml(@NonNull File file, @NonNull Node fileNode)
            throws MergingException;

    /**
     * Reads the content of a data folder and loads the DataItems.
     *
     * This should generate DataFiles, and process them with
     * {@link #processNewDataFile(java.io.File, DataFile, boolean)}.
     *
     * @param sourceFolder the source folder to load the resources from.
     *
     * @throws MergingException if something goes wrong
     */
    protected abstract void readSourceFolder(File sourceFolder, ILogger logger)
            throws MergingException;

    @Nullable
    protected abstract F createFileAndItems(File sourceFolder, File file, ILogger logger)
            throws MergingException;

    /**
     * Adds a collection of source files.
     * @param files the source files to add.
     */
    public void addSources(Collection<File> files) {
        mSourceFiles.addAll(files);
    }

    /**
     * Adds a new source file.
     * @param file the source file.
     */
    public void addSource(File file) {
        mSourceFiles.add(file);
    }

    /**
     * Returns the list of source files.
     * @return the source files.
     */
    @NonNull
    @Override
    public List<File> getSourceFiles() {
        return mSourceFiles;
    }

    /**
     * Returns the config name.
     * @return the config name.
     */
    public String getConfigName() {
        return mConfigName;
    }

    /**
     * Returns a matching Source file that contains a given file.
     *
     * "contains" means that the source file/folder is the root folder
     * of this file. The folder and/or file doesn't have to exist.
     *
     * @param file the file to search for
     * @return the Source file or null if no match is found.
     */
    @Override
    public File findMatchingSourceFile(File file) {
        for (File sourceFile : mSourceFiles) {
            if (sourceFile.equals(file)) {
                return sourceFile;
            } else if (sourceFile.isDirectory()) {
                String sourcePath = sourceFile.getAbsolutePath() + File.separator;
                if (file.getAbsolutePath().startsWith(sourcePath)) {
                    return sourceFile;
                }
            }
        }

        return null;
    }

    /**
     * Returns the number of items.
     * @return the number of items.
     *
     * @see DataMap
     */
    @Override
    public int size() {
        // returns the number of keys, not the size of the multimap which would include duplicate
        // ResourceItem objects.
        return mItems.keySet().size();
    }

    /**
     * Returns whether the set is empty of items.
     * @return true if the set contains no items.
     */
    public boolean isEmpty() {
        return mItems.isEmpty();
    }

    /**
     * Returns a map of the items.
     * @return a map of items.
     *
     * @see DataMap
     */
    @NonNull
    @Override
    public ListMultimap<String, I> getDataMap() {
        return mItems;
    }
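
    /*
     * Illustrative example for findMatchingSourceFile() above (hypothetical, Unix-style
     * paths; the source folder is assumed to exist so that isDirectory() holds). The
     * lookup is a simple path-prefix match against the registered source folders:
     *
     *     set.addSource(new File("/project/res"));
     *     set.findMatchingSourceFile(new File("/project/res/values/strings.xml"));
     *     // -> returns the File for "/project/res"
     *     set.findMatchingSourceFile(new File("/project/assets/data.bin"));
     *     // -> returns null, no registered source folder is a prefix of this path
     */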

    /**
     * Loads the DataSet from the files its source folders contain.
     *
     * All loaded items are set to TOUCHED. This is so that after loading the resources from
     * the files, they can be written directly (since touched forces them to be written).
     *
     * This also checks for duplicate items.
     *
     * @throws MergingException if something goes wrong
     */
    public void loadFromFiles(ILogger logger) throws MergingException {
        List<Message> errors = Lists.newArrayList();
        for (File file : mSourceFiles) {
            if (file.isDirectory()) {
                try {
                    readSourceFolder(file, logger);
                } catch (MergingException e) {
                    errors.addAll(e.getMessages());
                }
            } else if (file.isFile()) {
                // TODO support resource bundle
            }
        }
        MergingException.throwIfNonEmpty(errors);
        checkItems();
    }

    /**
     * Appends the DataSet to a given DOM object.
     *
     * @param setNode the root node for this set.
     * @param document The root XML document
     */
    void appendToXml(@NonNull Node setNode, @NonNull Document document,
            @NonNull MergeConsumer<I> consumer) {
        // add the config name attribute
        NodeUtils.addAttribute(document, setNode, null, ATTR_CONFIG, mConfigName);

        // add the source files.
        // we need to loop on the source files themselves and not the map to ensure we
        // write empty resourceSets
        for (File sourceFile : mSourceFiles) {

            // the node for the source and its path attribute
            Node sourceNode = document.createElement(NODE_SOURCE);
            setNode.appendChild(sourceNode);

            NodeUtils.addAttribute(document, sourceNode, null, ATTR_PATH,
                    sourceFile.getAbsolutePath());

            Collection<F> dataFiles = mSourceFileToDataFilesMap.get(sourceFile);

            for (F dataFile : dataFiles) {
                if (!dataFile.hasNotRemovedItems()) {
                    continue;
                }

                // the node for the file and its path and qualifiers attribute
                Node fileNode = document.createElement(NODE_FILE);
                sourceNode.appendChild(fileNode);
                NodeUtils.addAttribute(document, fileNode, null, ATTR_PATH,
                        dataFile.getFile().getAbsolutePath());
                dataFile.addExtraAttributes(document, fileNode, null);

                switch (dataFile.getType()) {
                    case GENERATED_FILES:
                        // Fall through. getDetailsXml() will return the XML which describes the
                        // generated files.
                    case XML_VALUES:
                        for (I item : dataFile.getItems()) {
                            if (item.isRemoved() || consumer.ignoreItemInMerge(item)) {
                                continue;
                            }
                            Node adoptedNode = item.getDetailsXml(document);
                            if (adoptedNode != null) {
                                fileNode.appendChild(adoptedNode);
                            }
                        }
                        break;
                    case SINGLE_FILE:
                        // no need to check for isRemoved here since it's checked
                        // at the file level and there's only one item.
                        I dataItem = dataFile.getItem();
                        NodeUtils.addAttribute(document, fileNode, null, ATTR_NAME,
                                dataItem.getName());
                        dataItem.addExtraAttributes(document, fileNode, null);
                        break;
                    default:
                        throw new IllegalStateException();
                }
            }
        }
    }
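
    /*
     * Illustrative sketch of the XML produced by appendToXml() for one source folder
     * (hypothetical paths and item names). The enclosing set element is created by the
     * caller, typically the DataMerger, so its tag name below is only an example; the
     * "source"/"file" elements and the "config"/"path"/"name" attributes come from the
     * constants defined in this class:
     *
     *     <dataSet config="main">
     *         <source path="/project/res">
     *             <file path="/project/res/values/strings.xml">
     *                 <string name="app_name">Example</string>
     *             </file>
     *             <file path="/project/res/drawable/icon.png" name="icon"/>
     *         </source>
     *     </dataSet>
     *
     * XML_VALUES (and GENERATED_FILES) entries embed the per-item details returned by
     * DataItem#getDetailsXml(), while SINGLE_FILE entries only record the item name.
     */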

    /**
     * Creates and returns a new DataSet from an XML node that was created with
     * {@link #appendToXml(org.w3c.dom.Node, org.w3c.dom.Document, MergeConsumer)}
     *
     * The object this method is called on is not modified. This should be static but can't be
     * due to subclasses.
     *
     * @param dataSetNode the node to read from.
     * @return a new DataSet object or null.
     */
    DataSet<I, F> createFromXml(Node dataSetNode) throws MergingException {
        // get the config name
        Attr configNameAttr = (Attr) dataSetNode.getAttributes().getNamedItem(ATTR_CONFIG);
        if (configNameAttr == null) {
            return null;
        }

        // create the DataSet that will be filled with the content of the XML.
        DataSet<I, F> dataSet = createSet(configNameAttr.getValue());

        // loop on the source nodes
        NodeList sourceNodes = dataSetNode.getChildNodes();
        for (int i = 0, n = sourceNodes.getLength(); i < n; i++) {
            Node sourceNode = sourceNodes.item(i);

            if (sourceNode.getNodeType() != Node.ELEMENT_NODE ||
                    !NODE_SOURCE.equals(sourceNode.getLocalName())) {
                continue;
            }

            Attr pathAttr = (Attr) sourceNode.getAttributes().getNamedItem(ATTR_PATH);
            if (pathAttr == null) {
                continue;
            }

            File sourceFolder = new File(pathAttr.getValue());
            dataSet.mSourceFiles.add(sourceFolder);

            // now loop on the files inside the source folder.
            NodeList fileNodes = sourceNode.getChildNodes();
            for (int j = 0, m = fileNodes.getLength(); j < m; j++) {
                Node fileNode = fileNodes.item(j);

                if (fileNode.getNodeType() != Node.ELEMENT_NODE ||
                        !NODE_FILE.equals(fileNode.getLocalName())) {
                    continue;
                }

                pathAttr = (Attr) fileNode.getAttributes().getNamedItem(ATTR_PATH);
                if (pathAttr == null) {
                    continue;
                }

                F dataFile = createFileAndItemsFromXml(new File(pathAttr.getValue()), fileNode);

                if (dataFile != null) {
                    dataSet.processNewDataFile(sourceFolder, dataFile, false /*setTouched*/);
                }
            }
        }

        return dataSet;
    }

    /**
     * Checks for duplicate items across all source files.
     *
     * @throws DuplicateDataException if a duplicate item is found.
     */
    protected void checkItems() throws DuplicateDataException {
        if (!mValidateEnabled) {
            return;
        }
        Collection<Collection<I>> duplicateCollections = Lists.newArrayList();
        // check a list for duplicates, ignoring removed items.
        for (Map.Entry<String, Collection<I>> entry : mItems.asMap().entrySet()) {
            Collection<I> items = entry.getValue();

            // there can be several versions of the same key if some are "removed"
            I lastItem = null;
            for (I item : items) {
                if (!item.isRemoved()) {
                    if (lastItem == null) {
                        lastItem = item;
                    } else {
                        // We have duplicates, store them and throw the exception later, so
                        // the user gets all the error messages at once.
                        duplicateCollections.add(items);
                    }
                }
            }
        }
        if (!duplicateCollections.isEmpty()) {
            throw new DuplicateDataException(
                    DuplicateDataException.createMessages(duplicateCollections));
        }
    }
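
    /*
     * Illustrative example of what checkItems() rejects (hypothetical files): two
     * non-removed items in the same set that map to the same key, e.g. two values files
     * under the same source folder both declaring the same item. The exact key format is
     * defined by the DataItem subclass.
     *
     *     res/values/strings.xml : <string name="title">A</string>
     *     res/values/more.xml    : <string name="title">B</string>
     *
     * Both items land under the same key in mItems, so loadFromFiles() (which calls
     * checkItems() when validation is enabled) throws a DuplicateDataException that
     * reports all conflicting collections at once.
     */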

    /**
     * Updates the DataSet with a given file.
     *
     * @param sourceFolder the sourceFile containing the changedFile
     * @param changedFile The changed file
     * @param fileStatus the change state
     * @return true if the set was properly updated, false otherwise
     * @throws MergingException if something goes wrong
     */
    public boolean updateWith(File sourceFolder, File changedFile, FileStatus fileStatus,
            ILogger logger) throws MergingException {
        switch (fileStatus) {
            case NEW:
                return handleNewFile(sourceFolder, changedFile, logger);
            case CHANGED:
                return handleChangedFile(sourceFolder, changedFile, logger);
            case REMOVED:
                return handleRemovedFile(changedFile);
        }

        return false;
    }

    protected boolean handleRemovedFile(File removedFile) {
        F dataFile = getDataFile(removedFile);

        if (dataFile == null) {
            return false;
        }

        // flag all resource items as removed
        for (I dataItem : dataFile.getItems()) {
            dataItem.setRemoved();
        }

        return true;
    }

    protected boolean isValidSourceFile(@NonNull File sourceFolder, @NonNull File file) {
        return checkFileForAndroidRes(file);
    }

    protected boolean handleNewFile(File sourceFolder, File file, ILogger logger)
            throws MergingException {
        F dataFile = createFileAndItems(sourceFolder, file, logger);
        if (dataFile != null) {
            processNewDataFile(sourceFolder, dataFile, true /*setTouched*/);
        }
        return true;
    }

    protected void processNewDataFile(@NonNull File sourceFolder,
            @NonNull F dataFile,
            boolean setTouched) throws MergingException {
        Collection<I> dataItems = dataFile.getItems();

        addDataFile(sourceFolder, dataFile);

        for (I dataItem : dataItems) {
            mItems.put(dataItem.getKey(), dataItem);
            if (setTouched) {
                dataItem.setTouched();
            }
        }
    }

    protected boolean handleChangedFile(
            @NonNull File sourceFolder,
            @NonNull File changedFile,
            @NonNull ILogger logger) throws MergingException {
        F dataFile = mDataFileMap.get(changedFile);
        for (I item : dataFile.getItems()) {
            item.setTouched();
        }
        return true;
    }

    protected void addItem(@NonNull I item, @Nullable String key) throws MergingException {
        if (key == null) {
            key = item.getKey();
        }

        mItems.put(key, item);
    }

    protected F getDataFile(@NonNull File file) {
        return mDataFileMap.get(file);
    }

    /**
     * Adds a new DataFile to this.
     *
     * @param sourceFile the parent source file.
     * @param dataFile the DataFile
     */
    private void addDataFile(@NonNull File sourceFile, @NonNull F dataFile) {
        mSourceFileToDataFilesMap.put(sourceFile, dataFile);
        mDataFileMap.put(dataFile.getFile(), dataFile);
    }

    @Override
    public String toString() {
        return Objects.toStringHelper(getClass())
                .addValue(mConfigName)
                .add("sources", Arrays.toString(mSourceFiles.toArray()))
                .toString();
    }

    /**
     * Checks a file to make sure it is a valid file in the android res/asset folders.
     * @param file the file to check
     * @return true if it is a valid file, false if it should be ignored.
     */
    protected static boolean checkFileForAndroidRes(@NonNull File file) {
        return !isIgnored(file);
    }
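
    /*
     * Illustrative sketch of the incremental-update entry point (hypothetical paths; in
     * practice a DataMerger drives this from file-change events). updateWith() dispatches
     * on the FileStatus and marks the affected DataItems as touched or removed, so a later
     * merge only rewrites what changed. The CHANGED case assumes the file is already known
     * to the set from a previous load.
     *
     *     File res = new File("/project/res");
     *     set.updateWith(res, new File("/project/res/values/strings.xml"),
     *             FileStatus.CHANGED, logger);   // touches the items of that DataFile
     *     set.updateWith(res, new File("/project/res/drawable/old.png"),
     *             FileStatus.REMOVED, logger);   // marks its items as removed
     */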

    /**
     * The pattern to use for ignoring asset files. Defaults to the same value as aapt but
     * can be customized via {@code $ANDROID_AAPT_IGNORE}.
     * <p>
     * Patterns syntax:
     * <ul>
     * <li> Delimiter is :
     * <li> Entry can start with the flag ! to avoid printing a warning
     *      about the file being ignored.
     * <li> Entry can have the flag {@code <dir>} to match only directories
     *      or {@code <file>} to match only files. Default is to match both.
     * <li> Entry can be a simplified glob {@code <prefix>*} or {@code *<suffix>}
     *      where prefix/suffix must have at least 1 character (so that
     *      we don't match a '*' catch-all pattern.)
     * <li> The special filenames "." and ".." are always ignored.
     * <li> Otherwise the full string is matched.
     * <li> Match is not case-sensitive.
     * </ul>
     */
    private static final Iterable<String> sIgnoredPatterns;

    static {
        String patterns = System.getenv("ANDROID_AAPT_IGNORE"); //$NON-NLS-1$
        if (patterns == null || patterns.isEmpty()) {
            // Matches aapt: frameworks/base/tools/aapt/AaptAssets.cpp:gDefaultIgnoreAssets
            patterns = "!.svn:!.git:!.ds_store:!*.scc:.*:<dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~";
        }
        sIgnoredPatterns = Splitter.on(':').split(patterns);
    }

    /**
     * Returns whether the given file should be ignored.
     *
     * @param file the file to check
     * @return true if the file should be ignored
     */
    public static boolean isIgnored(@NonNull File file) {
        String path = file.getPath();
        int nameIndex = path.lastIndexOf(File.separatorChar) + 1;

        if (path.equals(".") || path.equals("..")) {
            return true;
        }

        boolean ignore = false;

        boolean isDirectory = file.isDirectory();
        int nameLength = path.length() - nameIndex;

        for (String token : sIgnoredPatterns) {
            if (token.isEmpty()) {
                continue;
            }
            int tokenIndex = 0;
            if (token.charAt(tokenIndex) == '!') {
                tokenIndex++; // skip !
            }
            if (token.regionMatches(tokenIndex, "<dir>", 0, 5)) {
                if (!isDirectory) {
                    continue;
                }
                tokenIndex += 5;
            }
            if (token.regionMatches(tokenIndex, "<file>", 0, 6)) {
                if (isDirectory) {
                    continue;
                }
                tokenIndex += 6;
            }

            int n = token.length() - tokenIndex;

            if (token.charAt(tokenIndex) == '*') {
                // Match *suffix such as *.scc or *~
                tokenIndex++;
                n--;
                if (n <= nameLength) {
                    ignore = token.regionMatches(true, tokenIndex, path,
                            nameIndex + nameLength - n, n);
                }
            } else if (n > 1 && token.charAt(token.length() - 1) == '*') {
                // Match prefix* such as .* or _*
                ignore = token.regionMatches(true, tokenIndex, path, nameIndex, n - 1);
            } else {
                // Match exactly, such as thumbs.db, .git, etc.
                ignore = (token.length() - tokenIndex) == (path.length() - nameIndex) &&
                        token.regionMatches(true, tokenIndex, path, nameIndex,
                                path.length() - nameIndex);
            }

            if (ignore) {
                break;
            }
        }

        return ignore;
    }

    protected boolean getValidateEnabled() {
        return mValidateEnabled;
    }
}
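
/*
 * Illustrative examples for isIgnored() with the default pattern list above (hypothetical
 * file names; results for a custom ANDROID_AAPT_IGNORE value will differ):
 *
 *     isIgnored(new File(".git"))        -> true   (exact match on "!.git", also ".*")
 *     isIgnored(new File("Thumbs.db"))   -> true   (exact match, case-insensitive)
 *     isIgnored(new File("backup.scc"))  -> true   (suffix glob "*.scc")
 *     isIgnored(new File(".hidden"))     -> true   (prefix glob ".*")
 *     isIgnored(new File("_private"))    -> true only when it is an existing directory ("<dir>_*")
 *     isIgnored(new File("strings.xml")) -> false  (no pattern matches)
 */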