/* method: "get_text_raw(s)\nGet the unevaluated Text from Text objects." */
static utf8_string Shape_get_text_raw(const Object& self){
  const auto* textObject = dynamic_cast<const ObjText*>(&self);
  if (textObject == nullptr){
    // Only ObjText carries a raw (unevaluated) string.
    // NOTE(review): the sibling get_text_lines throws TypeError for the
    // same condition — verify whether ValueError here is intentional.
    throw ValueError(space_sep(self.GetType(), "does not support text."));
  }
  return textObject->GetRawString();
}
/* method: "get_text_lines()->(s,...)\n
Returns the evaluated text from a Text-object split into lines. Takes
the bounding rectangle in consideration, so that the lines are split in
the same way as they appear in Faint." */
static text_lines_t Shape_get_text_lines(const Object& self){
  const auto* textObject = dynamic_cast<const ObjText*>(&self);
  if (textObject == nullptr){
    // Non-text objects have no lines to evaluate.
    throw TypeError(space_sep(self.GetType(), "does not support text."));
  }

  // Evaluate with a context that supplies no external state.
  NullExpressionContext ctx;
  return split_evaluated(ctx, *textObject);
}
static void faintwindow_init(faintWindowObject&){
  // Construction from Python is always rejected: the FaintWindow
  // wrapper needs an AppContext, which only the C++ side can supply.
  const auto msg = space_sep("FaintWindow can not be instantiated.",
    "Use the 'window'-object instead.");
  throw TypeError(msg);
}
 utf8_string Name() const override{
   // Space-joins "Delete", the target object's type and "Point".
   // NOTE(review): presumably the user-visible command description —
   // confirm against callers of Name().
   const auto objectType = m_object->GetType();
   return space_sep("Delete", objectType, "Point");
 }
bool ParallelCorpus::ReadPartialAlignmentFile(const string& filename) {
  std::ifstream in(filename.c_str());
  if (!in.good()) {
    return false;
  }

  typedef boost::tokenizer<boost::char_separator<char> > tokenizer;
  boost::char_separator<char> tab_sep("\t", "", boost::keep_empty_tokens);
  boost::char_separator<char> space_sep(" ");
  string line;

  set<pair<int, int> > alignment;
  PartialAlignment partial_alignment;
  int doc_index = 0;
  InitPartialAlignment(doc_pairs_.at(doc_index), &partial_alignment);
  while (getline(in, line)) {
    vector<std::string> tokens;
    tokenizer line_tokenizer(line, tab_sep);
    for (tokenizer::iterator it = line_tokenizer.begin();
         it != line_tokenizer.end(); ++it) {
      string token = *it;
      tokens.push_back(token);
    }
    if (tokens.size() == 3) {
      // (source_index)\t(0 or more positive target indices)\t(0 or more
      // negative target indices)
      int source = atoi(tokens[0].c_str());
      vector<int> pos_targets, neg_targets;
      tokenizer index_tokenizer(tokens[1], space_sep);
      for (tokenizer::iterator it = index_tokenizer.begin();
           it != index_tokenizer.end(); ++it) {
        pos_targets.push_back(atoi(it->c_str()));
      }
      index_tokenizer.assign(tokens[2], space_sep);
      for (tokenizer::iterator it = index_tokenizer.begin();
           it != index_tokenizer.end(); ++it) {
        neg_targets.push_back(atoi(it->c_str()));
      }
      for (int t = 0; t < neg_targets.size(); ++t) {
        int target = neg_targets.at(t);
        partial_alignment[source][target] = false;
      }
      // If we encounter at least one positive alignment for the source
      // sentence, we consider all target sentences as false. Otherwise, we only
      // consider the seen target sentences as false.
      // TODO
      /*
      if (pos_targets.size() > 0) {
        for (int t = 0; t < doc_pairs_.at(doc_index).second.size(); ++t) {
          partial_alignment[source][t] = false;
        }
      } */
      for (int t = 0; t < pos_targets.size(); ++t) {
        int target = pos_targets.at(t);
        partial_alignment[source][target] = true;
        alignment.insert(std::make_pair(source, target));
      }
    } else {
      // An empty line indicates a document boundary
      alignments_.push_back(alignment);
      alignment.clear();
      partial_alignments_.push_back(partial_alignment);
      doc_index++;
      if (doc_index < doc_pairs_.size()) {
        InitPartialAlignment(doc_pairs_.at(doc_index), &partial_alignment);
      }
    }
  }

  in.close();
  if (alignments_.size() != doc_pairs_.size()) {
    return false;
  }
  return true;
}
static void as_init(AppContext&){
  // Instantiation from Python is always rejected: the expected
  // AppContext can only be supplied from the C++ side.
  const auto msg =
    space_sep(ACTIVE_SETTINGS_NAME, "can not be instantiated.");
  throw TypeError(msg);
}