Code example #1
Py::Object
Transformation::seq_xy_tups(const Py::Tuple & args) {
  _VERBOSE("Transformation::seq_xy_tups");
  args.verify_length(1);
  
  Py::SeqBase<Py::Object> xytups = args[0];
  
  size_t Nx = xytups.length();

  if (!_frozen) eval_scalars();

  Py::Tuple ret(Nx);
  Py::SeqBase<Py::Object> xytup;
  
  
  
  for (size_t i=0; i< Nx; ++i) {
    xytup = Py::SeqBase<Py::Object>( xytups[i] );

    double thisx = Py::Float(xytup[0]);
    double thisy = Py::Float(xytup[1]);

    // capture the transformed point; operator() returns the (x, y) pair
    std::pair<double, double> xy = this->operator()(thisx, thisy);

    Py::Tuple out(2);
    out[0] = Py::Float( xy.first );
    out[1] = Py::Float( xy.second );
    ret[i] = out;
  }
  
  return ret;
}
Code example #2
Py::Object 
Interval::update(const Py::Tuple &args) {
  _VERBOSE("Interval::update");
  args.verify_length(2);

  Py::SeqBase<Py::Object> vals = args[0];

  //don't use current bounds when updating box if ignore==1
  int ignore = Py::Int(args[1]);  
  size_t Nval = vals.length();
  if (Nval==0) return Py::Object();

  double minx = _val1->val();
  double maxx = _val2->val();

  double thisval;
  if (ignore) {
    thisval = Py::Float(vals[0]);
    minx = thisval;
    maxx = thisval;
  }


  for (size_t i=0; i<Nval; ++i) {
    thisval = Py::Float(vals[i]);

    if (thisval<minx) minx = thisval;
    if (thisval>maxx) maxx = thisval;
  } 


  _val1->set_api(minx);
  _val2->set_api(maxx);
  return Py::Object();
}
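
The update above follows a simple seeding rule: when the caller passes ignore=1, the current bounds are discarded and both ends of the interval are re-seeded from the first value before the usual running min/max pass. A minimal standalone sketch of that rule (illustrative only, not part of the original Interval class):

#include <cstddef>
#include <utility>
#include <vector>

// Sketch: recompute (lo, hi) from vals, optionally ignoring the old bounds.
static std::pair<double, double>
updated_interval(std::pair<double, double> current,
                 const std::vector<double>& vals, bool ignore)
{
    if (vals.empty()) return current;               // nothing to update
    double lo = ignore ? vals[0] : current.first;
    double hi = ignore ? vals[0] : current.second;
    for (std::size_t i = 0; i < vals.size(); ++i) {
        if (vals[i] < lo) lo = vals[i];
        if (vals[i] > hi) hi = vals[i];
    }
    return std::make_pair(lo, hi);
}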
Code example #3
Py::Object
Transformation::seq_x_y(const Py::Tuple & args) {
  _VERBOSE("Transformation::seq_x_y");
  args.verify_length(2);
  
  Py::SeqBase<Py::Object> x = args[0];
  Py::SeqBase<Py::Object> y = args[1];
  
  size_t Nx = x.length();
  size_t Ny = y.length();
  
  if (Nx!=Ny) 
    throw Py::ValueError("x and y must be equal length sequences");

  // evaluate the lazy objects  
  if (!_frozen) eval_scalars();

  Py::Tuple xo(Nx);
  Py::Tuple yo(Nx);
  
 
  for (size_t i=0; i< Nx; ++i) {
    double thisx = Py::Float(x[i]);
    double thisy = Py::Float(y[i]);
    std::pair<double, double> xy = this->operator()(thisx, thisy);
    xo[i] = Py::Float( xy.first );
    yo[i] = Py::Float( xy.second );
  }
  
  Py::Tuple ret(2);
  ret[0] = xo;
  ret[1] = yo;
  return ret;
}
Code example #4
Py::Object
Bbox::count_contains(const Py::Tuple &args) {
  _VERBOSE("Bbox::count_contains");
  args.verify_length(1);

  Py::SeqBase<Py::Object> xys = args[0];
  size_t Nxys = xys.length();
  long count = 0;

  double minx = _ll->xval();
  double miny = _ll->yval();
  double maxx = _ur->xval();
  double maxy = _ur->yval();

  for(size_t i=0; i < Nxys; i++) {
    Py::SeqBase<Py::Object> xy(xys[i]);
    xy.verify_length(2);
    double x = Py::Float(xy[0]);
    double y = Py::Float(xy[1]);
    int inx = ( ((x>=minx) && (x<=maxx)) || ((x>=maxx) && (x<=minx)) );
    if (!inx) continue;
    int iny = ( ((y>=miny) && (y<=maxy)) || ((y>=maxy) && (y<=miny)) );
    if (!iny) continue;
    count += 1;
  }
  return Py::Int(count);
}
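
Note that each per-axis comparison accepts either ordering of the bounds, so an inverted Bbox (where the "lower-left" value exceeds the "upper-right" one) still counts points correctly. A standalone sketch of that orientation-agnostic test, with hypothetical helper names:

#include <algorithm>

// Sketch: true if v lies between bound0 and bound1, in either order.
static bool in_closed_interval(double v, double bound0, double bound1)
{
    return v >= std::min(bound0, bound1) && v <= std::max(bound0, bound1);
}

// Sketch: the point test used by Bbox::count_contains, factored out.
static bool bbox_contains(double x, double y,
                          double minx, double miny,
                          double maxx, double maxy)
{
    return in_closed_interval(x, minx, maxx) &&
           in_closed_interval(y, miny, maxy);
}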
Code example #5
File: _path.cpp  Project: dhomeier/matplotlib-py3
Py::Object
_path_module::count_bboxes_overlapping_bbox(const Py::Tuple& args)
{
    args.verify_length(2);

    Py::Object              bbox   = args[0];
    Py::SeqBase<Py::Object> bboxes = args[1];

    double ax0, ay0, ax1, ay1;
    double bx0, by0, bx1, by1;
    long count = 0;

    if (py_convert_bbox(bbox.ptr(), ax0, ay0, ax1, ay1))
    {
        if (ax1 < ax0)
        {
            std::swap(ax0, ax1);
        }
        if (ay1 < ay0)
        {
            std::swap(ay0, ay1);
        }

        size_t num_bboxes = bboxes.size();
        for (size_t i = 0; i < num_bboxes; ++i)
        {
            Py::Object bbox_b = bboxes[i];
            if (py_convert_bbox(bbox_b.ptr(), bx0, by0, bx1, by1))
            {
                if (bx1 < bx0)
                {
                    std::swap(bx0, bx1);
                }
                if (by1 < by0)
                {
                    std::swap(by0, by1);
                }
                if (!((bx1 <= ax0) ||
                      (by1 <= ay0) ||
                      (bx0 >= ax1) ||
                      (by0 >= ay1)))
                {
                    ++count;
                }
            }
            else
            {
                throw Py::ValueError("Non-bbox object in bboxes list");
            }
        }
    }
    else
    {
        throw Py::ValueError("First argument to count_bboxes_overlapping_bbox must be a Bbox object.");
    }

    return Py::Int(count);
}
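
The inner test is the standard negated separation check, applied after both boxes have been normalized so that x0 <= x1 and y0 <= y1. As a standalone sketch (hypothetical helper, not part of _path.cpp):

// Sketch: overlap test for two normalized axis-aligned boxes. Boxes that
// only touch along an edge do not count, matching the <= / >= comparisons
// in count_bboxes_overlapping_bbox.
static bool bboxes_overlap(double ax0, double ay0, double ax1, double ay1,
                           double bx0, double by0, double bx1, double by1)
{
    return !(bx1 <= ax0 ||   // b entirely to the left of a
             by1 <= ay0 ||   // b entirely below a
             bx0 >= ax1 ||   // b entirely to the right of a
             by0 >= ay1);    // b entirely above a
}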
Code example #6
Py::Object
Bbox::update(const Py::Tuple &args) {
  _VERBOSE("Bbox::update");
  args.verify_length(2);

  Py::SeqBase<Py::Object> xys = args[0];

  //don't use current bounds on first update
  int ignore = Py::Int(args[1]);
  if (ignore==-1) {
    ignore = _ignore;
    _ignore = 0; // don't ignore future updates
  }

  size_t Nx = xys.length();
  if (Nx==0) return Py::Object();


  double minx = _ll->xval();
  double maxx = _ur->xval();
  double miny = _ll->yval();
  double maxy = _ur->yval();

  Py::Tuple tup;
  if (ignore) {
    tup = xys[0];
    double x = Py::Float(tup[0]);
    double y = Py::Float(tup[1]);

    minx=x;
    maxx=x;
    miny=y;
    maxy=y;
  }


  for (size_t i=0; i<Nx; ++i) {
    tup = xys[i];
    double x = Py::Float(tup[0]);
    double y = Py::Float(tup[1]);
    _posx.update(x);
    _posy.update(y);
    if (x<minx) minx=x;
    if (x>maxx) maxx=x;
    if (y<miny) miny=y;
    if (y>maxy) maxy=y;

  }


  _ll->x_api()->set_api(minx);
  _ll->y_api()->set_api(miny);
  _ur->x_api()->set_api(maxx);
  _ur->y_api()->set_api(maxy);
  return Py::Object();
}
Code example #7
File: _path.cpp  Project: dhomeier/matplotlib-py3
Py::Object
_path_module::point_in_path_collection(const Py::Tuple& args)
{
    args.verify_length(9);

    //segments, trans, clipbox, colors, linewidths, antialiaseds
    double                  x                = Py::Float(args[0]);
    double                  y                = Py::Float(args[1]);
    double                  radius           = Py::Float(args[2]);
    agg::trans_affine       master_transform = py_to_agg_transformation_matrix(args[3].ptr());
    Py::SeqBase<Py::Object> paths            = args[4];
    Py::SeqBase<Py::Object> transforms_obj   = args[5];
    Py::SeqBase<Py::Object> offsets_obj      = args[6];
    agg::trans_affine       offset_trans     = py_to_agg_transformation_matrix(args[7].ptr());
    bool                    filled           = Py::Boolean(args[8]);

    PyArrayObject* offsets = (PyArrayObject*)PyArray_FromObject(
        offsets_obj.ptr(), PyArray_DOUBLE, 0, 2);
    if (!offsets ||
            (PyArray_NDIM(offsets) == 2 && PyArray_DIM(offsets, 1) != 2) ||
            (PyArray_NDIM(offsets) == 1 && PyArray_DIM(offsets, 0) != 0))
    {
        Py_XDECREF(offsets);
        throw Py::ValueError("Offsets array must be Nx2");
    }

    size_t Npaths      = paths.length();
    size_t Noffsets    = offsets->dimensions[0];
    size_t N           = std::max(Npaths, Noffsets);
    size_t Ntransforms = std::min(transforms_obj.length(), N);
    size_t i;

    // Convert all of the transforms up front
    typedef std::vector<agg::trans_affine> transforms_t;
    transforms_t transforms;
    transforms.reserve(Ntransforms);
    for (i = 0; i < Ntransforms; ++i)
    {
        agg::trans_affine trans = py_to_agg_transformation_matrix
                                  (transforms_obj[i].ptr(), false);
        trans *= master_transform;
        transforms.push_back(trans);
    }

    Py::List result;
    agg::trans_affine trans;

    for (i = 0; i < N; ++i)
    {
        PathIterator path(paths[i % Npaths]);

        if (Ntransforms)
        {
            trans = transforms[i % Ntransforms];
        }
        else
        {
            trans = master_transform;
        }

        if (Noffsets)
        {
            double xo = *(double*)PyArray_GETPTR2(offsets, i % Noffsets, 0);
            double yo = *(double*)PyArray_GETPTR2(offsets, i % Noffsets, 1);
            offset_trans.transform(&xo, &yo);
            trans *= agg::trans_affine_translation(xo, yo);
        }

        if (filled)
        {
            if (::point_in_path(x, y, path, trans))
                result.append(Py::Int((int)i));
        }
        else
        {
            if (::point_on_path(x, y, radius, path, trans))
                result.append(Py::Int((int)i));
        }
    }

    return result;
}
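
As in the other collection routines, per-item properties (paths, transforms, offsets) are cycled with modular indexing: N is the longest list and shorter lists simply repeat, so a single path can be tested against a hundred offsets. A minimal sketch of that broadcast rule (helper name is illustrative):

#include <cstddef>
#include <vector>

// Sketch: index a property list modulo its length. The caller must ensure
// the list is non-empty, just as the i % Npaths / i % Noffsets code assumes.
template <typename T>
static const T& cycled(const std::vector<T>& items, std::size_t i)
{
    return items[i % items.size()];
}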
Code example #8
File: _path.cpp  Project: dhomeier/matplotlib-py3
Py::Object
_path_module::get_path_collection_extents(const Py::Tuple& args)
{
    args.verify_length(5);

    //segments, trans, clipbox, colors, linewidths, antialiaseds
    agg::trans_affine       master_transform = py_to_agg_transformation_matrix(args[0].ptr());
    Py::SeqBase<Py::Object> paths            = args[1];
    Py::SeqBase<Py::Object> transforms_obj   = args[2];
    Py::Object              offsets_obj      = args[3];
    agg::trans_affine       offset_trans     = py_to_agg_transformation_matrix(args[4].ptr(), false);

    PyArrayObject* offsets = NULL;
    double x0, y0, x1, y1, xm, ym;

    try
    {
        offsets = (PyArrayObject*)PyArray_FromObject(
            offsets_obj.ptr(), PyArray_DOUBLE, 0, 2);
        if (!offsets ||
            (PyArray_NDIM(offsets) == 2 && PyArray_DIM(offsets, 1) != 2) ||
            (PyArray_NDIM(offsets) == 1 && PyArray_DIM(offsets, 0) != 0))
        {
            throw Py::ValueError("Offsets array must be Nx2");
        }

        size_t Npaths      = paths.length();
        size_t Noffsets    = offsets->dimensions[0];
        size_t N               = std::max(Npaths, Noffsets);
        size_t Ntransforms = std::min(transforms_obj.length(), N);
        size_t i;

        // Convert all of the transforms up front
        typedef std::vector<agg::trans_affine> transforms_t;
        transforms_t transforms;
        transforms.reserve(Ntransforms);
        for (i = 0; i < Ntransforms; ++i)
        {
            agg::trans_affine trans = py_to_agg_transformation_matrix
                (transforms_obj[i].ptr(), false);
            trans *= master_transform;
            transforms.push_back(trans);
        }

        // Then offset each of those and collect the mins/maxs
        x0 = std::numeric_limits<double>::infinity();
        y0 = std::numeric_limits<double>::infinity();
        x1 = -std::numeric_limits<double>::infinity();
        y1 = -std::numeric_limits<double>::infinity();
        xm = std::numeric_limits<double>::infinity();
        ym = std::numeric_limits<double>::infinity();
        agg::trans_affine trans;

        for (i = 0; i < N; ++i)
        {
            PathIterator path(paths[i % Npaths]);
            if (Ntransforms)
            {
                trans = transforms[i % Ntransforms];
            }
            else
            {
                trans = master_transform;
            }

            if (Noffsets)
            {
                double xo = *(double*)PyArray_GETPTR2(offsets, i % Noffsets, 0);
                double yo = *(double*)PyArray_GETPTR2(offsets, i % Noffsets, 1);
                offset_trans.transform(&xo, &yo);
                trans *= agg::trans_affine_translation(xo, yo);
            }

            ::get_path_extents(path, trans, &x0, &y0, &x1, &y1, &xm, &ym);
        }
    }
    catch (...)
    {
        Py_XDECREF(offsets);
        throw;
    }

    Py_XDECREF(offsets);

    Py::Tuple result(4);
    result[0] = Py::Float(x0);
    result[1] = Py::Float(y0);
    result[2] = Py::Float(x1);
    result[3] = Py::Float(y1);
    return result;
}
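
The extents are seeded with +inf/-inf so the first path processed always tightens them and every later path can only widen the result. A compact sketch of that accumulator (illustrative, not the module's API):

#include <algorithm>
#include <limits>

// Sketch: running bounding-box accumulator seeded so that any point updates it.
struct ExtentAccumulator
{
    double x0, y0, x1, y1;
    ExtentAccumulator()
        : x0(std::numeric_limits<double>::infinity()),
          y0(std::numeric_limits<double>::infinity()),
          x1(-std::numeric_limits<double>::infinity()),
          y1(-std::numeric_limits<double>::infinity()) {}
    void update(double x, double y)
    {
        x0 = std::min(x0, x);  y0 = std::min(y0, y);
        x1 = std::max(x1, x);  y1 = std::max(y1, y);
    }
};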
Code example #9
Py::Object
_image_module::from_images(const Py::Tuple& args) {
  _VERBOSE("_image_module::from_images");
  
  args.verify_length(3);
  
  size_t numrows = Py::Int(args[0]);
  size_t numcols = Py::Int(args[1]);
  
  Py::SeqBase<Py::Object> tups = args[2];
  size_t N = tups.length();
  
  if (N==0)
    throw Py::RuntimeError("Empty list of images");
  
  Py::Tuple tup;
  
  long ox(0), oy(0), thisx(0), thisy(0);   // signed: offsets may be negative
  
  //copy image 0 output buffer into return images output buffer
  Image* imo = new Image;
  imo->rowsOut  = numrows;
  imo->colsOut  = numcols;
  
  size_t NUMBYTES(numrows * numcols * imo->BPP);    
  imo->bufferOut = new agg::int8u[NUMBYTES];  
  if (imo->bufferOut==NULL) //todo: also handle allocation throw
    throw Py::MemoryError("_image_module::from_images could not allocate memory");
  
  imo->rbufOut = new agg::rendering_buffer;
  imo->rbufOut->attach(imo->bufferOut, imo->colsOut, imo->rowsOut, imo->colsOut * imo->BPP);
   
  pixfmt pixf(*imo->rbufOut);
  renderer_base rb(pixf);
  
  
  for (size_t imnum=0; imnum< N; imnum++) {
    tup = Py::Tuple(tups[imnum]);
    Image* thisim = static_cast<Image*>(tup[0].ptr());    
    if (imnum==0) 
      rb.clear(thisim->bg);
    ox = Py::Int(tup[1]);
    oy = Py::Int(tup[2]);
    
    size_t ind=0;
    for (size_t j=0; j<thisim->rowsOut; j++) {
      for (size_t i=0; i<thisim->colsOut; i++) {
	thisx = (long)i + ox;
	thisy = (long)j + oy;
	// skip pixels that fall outside the destination image
	if (thisx<0 || thisx>=(long)numcols || thisy<0 || thisy>=(long)numrows) {
	  ind +=4;
	  continue;
	}
	
	pixfmt::color_type p;
	p.r = *(thisim->bufferOut+ind++);
	p.g = *(thisim->bufferOut+ind++);
	p.b = *(thisim->bufferOut+ind++);
	p.a = *(thisim->bufferOut+ind++);
	pixf.blend_pixel(thisx, thisy, p, 255);
      }
    }
  }
  
  return Py::asObject(imo);
  
  
  
}
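
The sequential ind walk over bufferOut is row-major RGBA addressing with a four-byte pixel stride (the four consecutive reads of r, g, b, a above). A sketch of the equivalent direct index computation, assuming BPP is 4:

#include <cstddef>

// Sketch: byte offset of the R component of pixel (row, col) in an RGBA
// buffer with ncols pixels per row; G, B and A follow at +1, +2, +3.
static std::size_t rgba_byte_offset(std::size_t row, std::size_t col,
                                    std::size_t ncols)
{
    return 4 * (row * ncols + col);
}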
Code example #10
Py::Object
RendererAgg::draw_lines(const Py::Tuple& args) {
  
  theRasterizer->reset_clipping();
  _VERBOSE("RendererAgg::draw_lines");
  args.verify_length(3);  
  Py::Object gc = args[0];
  Py::SeqBase<Py::Object> x = args[1];  //todo: use numerix for efficiency
  Py::SeqBase<Py::Object> y = args[2];  //todo: use numerix for efficiency
  
  set_clip_rectangle(gc);
  size_t Nx = x.length();
  size_t Ny = y.length();
  
  if (Nx!=Ny) 
    throw Py::ValueError("x and y must be equal length sequences");
  
  
  if (Nx<2) 
    throw Py::ValueError("x and y must have length >= 2");
  
  
  agg::vcgen_stroke::line_cap_e cap = get_linecap(gc);
  agg::vcgen_stroke::line_join_e join = get_joinstyle(gc);
  
  
  double lw = points_to_pixels ( gc.getAttr("_linewidth") ) ;
  //std::cout << "agg lw " << lw << std::endl;
  agg::rgba color = get_color(gc);
  
  
  // process the dashes
  Py::Tuple dashes = get_dashes(gc);
  
  bool useDashes = dashes[0].ptr() != Py_None;
  double offset = 0;
  Py::SeqBase<Py::Object> dashSeq;
  
  if ( useDashes ) {
    //TODO: use offset
    offset = points_to_pixels_snapto(dashes[0]);
    dashSeq = dashes[1]; 
  }
  
  
  agg::path_storage path;
  
  int isaa = antialiased(gc);

  double heightd = double(height);
  if (Nx==2) { 
    // this is a little hack - len(2) lines are probably grid and
    // ticks so I'm going to snap to pixel
    //printf("snapto %d\n", Nx);
    double x0 = Py::Float(x[0]);
    double y0 = Py::Float(y[0]);
    double x1 = Py::Float(x[1]);
    double y1 = Py::Float(y[1]);
    
    if (x0==x1) {
      x0 = (int)x0 + 0.5;
      x1 = (int)x1 + 0.5;
    }

    if (y0==y1) {
      y0 = (int)y0 + 0.5;
      y1 = (int)y1 + 0.5;
    }

    y0 = heightd-y0;
    y1 = heightd-y1;

    path.move_to(x0, y0);
    path.line_to(x1, y1);
    
  }
  else {
    double thisX = Py::Float( x[0] );
    double thisY = Py::Float( y[0] );
    thisY = heightd - thisY; //flipy
    path.move_to(thisX, thisY);
    for (size_t i=1; i<Nx; ++i) {
      thisX = Py::Float( x[i] );
      thisY = Py::Float( y[i] );
      thisY = heightd - thisY;  //flipy
      //if ((i<10) || i>=19990)
      //std::cout << i << " " << Nx << " " << thisX << " " << thisY << std::endl;
      path.line_to(thisX, thisY);
    }
  }  
  
  //std::cout << width << " " << height << std::endl;
  if (! useDashes ) {
    
    agg::conv_stroke<agg::path_storage> stroke(path);
    stroke.line_cap(cap);
    stroke.line_join(join);
    stroke.width(lw);
    //freeze was here std::cout << "\t adding path!" << std::endl;         
    theRasterizer->add_path(stroke);
  }
  else {
    // set the dashes //TODO: scale for DPI
    
    size_t N = dashSeq.length();
    if (N%2 != 0  ) 
      throw Py::ValueError("dashes must be an even length sequence");     
    
    typedef agg::conv_dash<agg::path_storage> dash_t;
    dash_t dash(path);
    
    double on, off;
    
    for (size_t i=0; i<N/2; i+=1) {
      on = points_to_pixels_snapto(dashSeq[2*i]);
      off = points_to_pixels_snapto(dashSeq[2*i+1]);
      dash.add_dash(on, off);
    }
    agg::conv_stroke<dash_t> stroke(dash);
    stroke.line_cap(cap);
    stroke.line_join(join);
    stroke.width(lw);
    theRasterizer->add_path(stroke);
    
  }
  
  if ( isaa ) {
    rendererAA->color(color);    
    agg::render_scanlines(*theRasterizer, *slineP8, *rendererAA); 
  }
  else {
    rendererBin->color(color);     
    agg::render_scanlines(*theRasterizer, *slineBin, *rendererBin); 
  }
  
  return Py::Object();
  
}
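
The snapping applied to two-point lines centres axis-aligned segments on a pixel by truncating the coordinate and adding 0.5, which keeps one-pixel grid lines and ticks from being smeared across two rows or columns by antialiasing. As a tiny sketch (assumed helper name):

// Sketch: move a coordinate to the centre of the pixel that contains it,
// as done above for segments that are exactly vertical (x0==x1) or
// horizontal (y0==y1). Both endpoints get the same treatment, so the
// segment stays axis-aligned.
static double snap_to_pixel_center(double v)
{
    return (double)(int)v + 0.5;
}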
Code example #11
Py::Object
RendererAgg::draw_regpoly_collection(const Py::Tuple& args) {
  theRasterizer->reset_clipping();
  
  _VERBOSE("RendererAgg::draw_regpoly_collection");
  args.verify_length(9);  
  
  
  set_clip_from_bbox(args[0]);

  Py::SeqBase<Py::Object> offsets = args[1];  

  // this is throwing even though the instance is a Transformation!
  //if (!Transformation::check(args[2])) 
  // throw Py::TypeError("RendererAgg::draw_regpoly_collection(clipbox, offsets, transOffset, verts, ...) expected a Transformation instance for transOffset");
  
  Transformation* transOffset = static_cast<Transformation*>(args[2].ptr());


  transOffset->eval_scalars();

  Py::SeqBase<Py::Object> verts = args[3];  
  Py::SeqBase<Py::Object> sizes = args[4];  
  Py::SeqBase<Py::Object> facecolors = args[5];  
  Py::SeqBase<Py::Object> edgecolors = args[6];  
  Py::SeqBase<Py::Object> linewidths = args[7];  
  Py::SeqBase<Py::Object> antialiaseds = args[8];  

  size_t Noffsets = offsets.length();
  size_t Nverts = verts.length();
  size_t Nsizes = sizes.length();
  size_t Nface = facecolors.length();
  size_t Nedge = edgecolors.length();
  size_t Nlw = linewidths.length();
  size_t Naa = antialiaseds.length();
  
  double thisx, thisy;
  
  // dump the x,y vertices into a double array for faster access
  double xverts[Nverts];
  double yverts[Nverts];
  Py::Tuple xy;
  for (size_t i=0; i<Nverts; ++i) {
    xy = Py::Tuple(verts[i]);
    xverts[i] = Py::Float(xy[0]);
    yverts[i] = Py::Float(xy[1]);
  }
  
  std::pair<double, double> offsetPair;
  for (size_t i=0; i<Noffsets; ++i) {
    Py::Tuple pos = Py::Tuple(offsets[i]);
    double xo = Py::Float(pos[0]);
    double yo = Py::Float(pos[1]);
    offsetPair = transOffset->operator()(xo, yo);
    
    
    double scale = Py::Float(sizes[i%Nsizes]);
    
    
    agg::path_storage path;
    
    for (size_t j=0; j<Nverts; ++j) {
      thisx = scale*xverts[j] + offsetPair.first;
      thisy = scale*yverts[j] + offsetPair.second;
      thisy = height - thisy;
      if (j==0) path.move_to(thisx, thisy);
      else path.line_to(thisx, thisy);

      
    }
    path.close_polygon();
    int isaa = Py::Int(antialiaseds[i%Naa]);     
    // get the facecolor and render
    Py::Tuple rgba = Py::Tuple(facecolors[ i%Nface]);
    double r = Py::Float(rgba[0]);
    double g = Py::Float(rgba[1]);
    double b = Py::Float(rgba[2]);
    double a = Py::Float(rgba[3]);
    if (a>0) { //only render if alpha>0
      agg::rgba facecolor(r, g, b, a); 

      theRasterizer->add_path(path);          

      if (isaa) {
	rendererAA->color(facecolor);    
	agg::render_scanlines(*theRasterizer, *slineP8, *rendererAA); 
      }
      else {
	rendererBin->color(facecolor);    
	agg::render_scanlines(*theRasterizer, *slineBin, *rendererBin); 
      }
    } //renderer face
    
    // get the edgecolor and render
    rgba = Py::Tuple(edgecolors[ i%Nedge]);
    r = Py::Float(rgba[0]);
    g = Py::Float(rgba[1]);
    b = Py::Float(rgba[2]);
    a = Py::Float(rgba[3]);
    if (a>0) { //only render if alpha>0
      agg::rgba edgecolor(r, g, b, a); 

      agg::conv_stroke<agg::path_storage> stroke(path);
      //stroke.line_cap(cap);
      //stroke.line_join(join);
      double lw = points_to_pixels ( Py::Float( linewidths[i%Nlw] ) );
      stroke.width(lw);
      theRasterizer->add_path(stroke);

      // render antialiased or not
      if ( isaa ) {
	rendererAA->color(edgecolor);    
	agg::render_scanlines(*theRasterizer, *slineP8, *rendererAA); 
      }
      else {      
	rendererBin->color(edgecolor);    
	agg::render_scanlines(*theRasterizer, *slineBin, *rendererBin); 
      }
    } //rendered edge
    
  } // for every poly
  return Py::Object();
}
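
Each marker is drawn by scaling the template polygon's vertices, translating them to the transformed offset, and flipping y, since the raster buffer's origin is at the top-left while matplotlib's is at the bottom-left. A sketch of that per-vertex placement (assumed helper; height is the canvas height in pixels):

// Sketch: place one regular-polygon vertex (vx, vy) at offset (xo, yo).
static void place_vertex(double vx, double vy, double scale,
                         double xo, double yo, double height,
                         double* outx, double* outy)
{
    *outx = scale * vx + xo;
    *outy = height - (scale * vy + yo);   // flip y for the raster buffer
}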
Code example #12
Py::Object
RendererAgg::draw_poly_collection(const Py::Tuple& args) {
  theRasterizer->reset_clipping();
  
  _VERBOSE("RendererAgg::draw_poly_collection");

  args.verify_length(9);  
  

  Py::SeqBase<Py::Object> verts = args[0];    

  //todo: fix transformation check
  Transformation* transform = static_cast<Transformation*>(args[1].ptr());
  transform->eval_scalars();

  set_clip_from_bbox(args[2]);

  Py::SeqBase<Py::Object> facecolors = args[3];  
  Py::SeqBase<Py::Object> edgecolors = args[4];  
  Py::SeqBase<Py::Object> linewidths = args[5];  
  Py::SeqBase<Py::Object> antialiaseds = args[6];  

  
  Py::SeqBase<Py::Object> offsets;
  Transformation* transOffset = NULL;
  bool usingOffsets = args[7].ptr() != Py_None;
  if (usingOffsets) {
    offsets = args[7];  
    //todo: fix transformation check
    transOffset = static_cast<Transformation*>(args[8].ptr());
    transOffset->eval_scalars();
  }

  size_t Noffsets = offsets.length();
  size_t Nverts = verts.length();
  size_t Nface = facecolors.length();
  size_t Nedge = edgecolors.length();
  size_t Nlw = linewidths.length();
  size_t Naa = antialiaseds.length();

  size_t N = (Noffsets>Nverts) ? Noffsets : Nverts;
   
  std::pair<double, double> xyo, xy;
  Py::Tuple thisverts;
  for (size_t i=0; i<N; ++i) {

    thisverts = verts[i % Nverts];

    if (usingOffsets) {
      Py::Tuple pos = Py::Tuple(offsets[i]);
      double xo = Py::Float(pos[0]);
      double yo = Py::Float(pos[1]);
      xyo = transOffset->operator()(xo, yo);
    }

    size_t Nverts = thisverts.length();
    agg::path_storage path;
    
    Py::Tuple thisvert;

    
    // dump the verts to double arrays so we can do more efficient
    // look aheads and behinds when doing snapto pixels
    double xs[Nverts], ys[Nverts];    
    for (size_t j=0; j<Nverts; ++j) {
      thisvert = Py::Tuple(thisverts[j]);
      double x = Py::Float(thisvert[0]);
      double y = Py::Float(thisvert[1]);
      xy = transform->operator()(x, y);      

      if (usingOffsets) {
	xy.first  += xyo.first;
	xy.second += xyo.second;
      }

      xy.second = height - xy.second;
      xs[j] = xy.first;
      ys[j] = xy.second;

    }

    for (size_t j=0; j<Nverts; ++j) {

      double x = xs[j];
      double y = ys[j];

      if (j==0) {
	if (xs[j] == xs[Nverts-1]) x = (int)xs[j] + 0.5;
	if (ys[j] == ys[Nverts-1]) y = (int)ys[j] + 0.5;
      }
      else if (j==Nverts-1) {
	if (xs[j] == xs[0]) x = (int)xs[j] + 0.5;
	if (ys[j] == ys[0]) y = (int)ys[j] + 0.5;
      }

      if (j < Nverts-1) {
	if (xs[j] == xs[j+1]) x = (int)xs[j] + 0.5;
	if (ys[j] == ys[j+1]) y = (int)ys[j] + 0.5;
      }
      if (j>0) {
	if (xs[j] == xs[j-1]) x = (int)xs[j] + 0.5;
	if (ys[j] == ys[j-1]) y = (int)ys[j] + 0.5;
      }

      if (j==0) path.move_to(x,y);
      else path.line_to(x,y); 
    }

    path.close_polygon();
    int isaa = Py::Int(antialiaseds[i%Naa]);     
    // get the facecolor and render
    Py::Tuple rgba = Py::Tuple(facecolors[ i%Nface]);
    double r = Py::Float(rgba[0]);
    double g = Py::Float(rgba[1]);
    double b = Py::Float(rgba[2]);
    double a = Py::Float(rgba[3]);
    if (a>0) { //only render if alpha>0
      agg::rgba facecolor(r, g, b, a); 

      theRasterizer->add_path(path);          

      if (isaa) {
	rendererAA->color(facecolor);    
	agg::render_scanlines(*theRasterizer, *slineP8, *rendererAA);
      }
      else {
	rendererBin->color(facecolor);    
	agg::render_scanlines(*theRasterizer, *slineBin, *rendererBin);
      }
    } //renderer face
    
    // get the edgecolor and render
    rgba = Py::Tuple(edgecolors[ i%Nedge]);
    r = Py::Float(rgba[0]);
    g = Py::Float(rgba[1]);
    b = Py::Float(rgba[2]);
    a = Py::Float(rgba[3]);

    if (a>0) { //only render if alpha>0
      agg::rgba edgecolor(r, g, b, a); 

      agg::conv_stroke<agg::path_storage> stroke(path);
      //stroke.line_cap(cap);
      //stroke.line_join(join);
      double lw = points_to_pixels ( Py::Float( linewidths[i%Nlw] ) );
      stroke.width(lw);
      theRasterizer->add_path(stroke);

      // render antialiased or not
      if ( isaa ) {
	rendererAA->color(edgecolor);    
	agg::render_scanlines(*theRasterizer, *slineP8, *rendererAA); 
      }
      else {
	rendererBin->color(edgecolor);    
	agg::render_scanlines(*theRasterizer, *slineBin, *rendererBin); 
      }
    } //rendered edge
    
  } // for every poly
  return Py::Object();
}
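
The neighbour comparisons above snap a coordinate to a pixel centre whenever the edge into or out of a vertex is exactly vertical or horizontal (including the closing edge back to vertex 0), so rectangle-like polygons such as bars keep crisp one-pixel outlines. A condensed sketch of that rule for one axis (illustrative; the caller supplies the cyclic neighbours):

// Sketch: snap v when it equals either cyclic neighbour along the same axis,
// i.e. when the adjacent polygon edge is axis-aligned.
static double snap_if_axis_aligned(double v, double prev_v, double next_v)
{
    if (v == prev_v || v == next_v)
        return (double)(int)v + 0.5;
    return v;
}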
Code example #13
Py::Object
RendererAgg::draw_line_collection(const Py::Tuple& args) {
  theRasterizer->reset_clipping();
  
  _VERBOSE("RendererAgg::draw_line_collection");

  args.verify_length(8);  
  
  
  //segments, trans, clipbox, colors, linewidths, antialiaseds
  Py::SeqBase<Py::Object> segments = args[0];  


  /* this line is broken, mysteriously
  if (!Transformation::check(args[1])) 
    throw Py::TypeError("RendererAgg::draw_line_collection(segments, transform, ...) expected a Transformation instance for transform");
  
  */

  Transformation* transform = static_cast<Transformation*>(args[1].ptr());

  set_clip_from_bbox(args[2]);

  Py::SeqBase<Py::Object> colors = args[3];  
  Py::SeqBase<Py::Object> linewidths = args[4];  
  Py::SeqBase<Py::Object> antialiaseds = args[5];  

  bool usingOffsets = args[6].ptr()!=Py_None;
  Py::SeqBase<Py::Object> offsets;
  Transformation* transOffset=NULL;
  if  (usingOffsets) {
    offsets = Py::SeqBase<Py::Object>(args[6]);        
    transOffset = static_cast<Transformation*>(args[7].ptr());
  }

  size_t Nsegments = segments.length();
  size_t Nc = colors.length();
  size_t Nlw = linewidths.length();
  size_t Naa = antialiaseds.length();
  size_t Noffsets = 0;
  size_t N = Nsegments;

  if (usingOffsets) {
    Noffsets = offsets.length();
    if (Noffsets>Nsegments) N = Noffsets;
  }

  double xo(0.0), yo(0.0), thisx(0.0), thisy(0.0);
  std::pair<double, double> xy;  
  Py::Tuple xyo;
  Py::SeqBase<Py::Object> xys;
  for (size_t i=0; i<N; ++i) {
    if (usingOffsets) {
      xyo = Py::Tuple(offsets[i%Noffsets]);
      xo = Py::Float(xyo[0]);
      yo = Py::Float(xyo[1]);
      xy = transOffset->operator()(xo,yo);
      xo = xy.first;
      yo = xy.second;
    }

    xys = segments[i%Nsegments];
    size_t numtups = xys.length();
    if (numtups<2) continue;
      
    agg::path_storage path;

    for (size_t j=0; j<numtups; j++) {
      xyo = xys[j];
      thisx = Py::Float(xyo[0]);
      thisy = Py::Float(xyo[1]);
      xy = transform->operator()(thisx,thisy);
      thisx = xy.first;
      thisy = xy.second;

      if (usingOffsets) {
	thisx += xo;
	thisy += yo;
      }      
      if (j==0)  path.move_to(thisx, height-thisy);
      else       path.line_to(thisx, height-thisy);
    }

    agg::conv_stroke<agg::path_storage> stroke(path);
    //stroke.line_cap(cap);
    //stroke.line_join(join);
    double lw = points_to_pixels ( Py::Float( linewidths[i%Nlw] ) );
    
    stroke.width(lw);
    theRasterizer->add_path(stroke);

    // get the color and render
    Py::Tuple rgba = Py::Tuple(colors[ i%Nc]);
    double r = Py::Float(rgba[0]);
    double g = Py::Float(rgba[1]);
    double b = Py::Float(rgba[2]); 
    double a = Py::Float(rgba[3]);
    agg::rgba color(r, g, b, a); 

    // render antialiased or not
    int isaa = Py::Int(antialiaseds[i%Naa]);
    if ( isaa ) {
      rendererAA->color(color);    
      agg::render_scanlines(*theRasterizer, *slineP8, *rendererAA);
    }
    else {
      rendererBin->color(color);    
      agg::render_scanlines(*theRasterizer, *slineBin, *rendererBin);
    }
  } //for every segment
  return Py::Object();
}
Code example #14
Py::Object
RendererAgg::draw_line_collection(const Py::Tuple& args) {
  
  
  _VERBOSE("RendererAgg::draw_line_collection");
  args.verify_length(8);  
  
  
  //segments, trans, clipbox, colors, linewidths, antialiaseds
  Py::SeqBase<Py::Object> segments = args[0];  


  /* this line is broken, mysteriously
  if (!Transformation::check(args[1])) 
    throw Py::TypeError("RendererAgg::draw_line_collection(segments, transform, ...) expected a Transformation instance for transform");
  
  */

  Transformation* transform = static_cast<Transformation*>(args[1].ptr());

  set_clip_from_bbox(args[2]);

  Py::SeqBase<Py::Object> colors = args[3];  
  Py::SeqBase<Py::Object> linewidths = args[4];  
  Py::SeqBase<Py::Object> antialiaseds = args[5];  

  bool usingOffsets = args[6].ptr()!=Py_None;
  Py::SeqBase<Py::Object> offsets;
  Transformation* transOffset=NULL;
  if  (usingOffsets) {
    /* this line is broken, mysteriously
    if (!Transformation::check(args[7])) 
      throw Py::TypeError("RendererAgg::draw_line_collection expected a Transformation instance for transOffset");
    */
    offsets = Py::SeqBase<Py::Object>(args[6]);        
    transOffset = static_cast<Transformation*>(args[7].ptr());
  }

  size_t Nsegments = segments.length();
  size_t Nc = colors.length();
  size_t Nlw = linewidths.length();
  size_t Naa = antialiaseds.length();
  size_t Noffsets = 0;
  size_t N = Nsegments;

  if (usingOffsets) {
    Noffsets = offsets.length();
    if (Noffsets>Nsegments) N = Noffsets;
  }
  

  Py::Tuple xyo, pos;
  for (size_t i=0; i<N; ++i) {
    
    pos = Py::Tuple(segments[i%Nsegments]);
    double x0 = Py::Float(pos[0]);
    double y0 = Py::Float(pos[1]);
    double x1 = Py::Float(pos[2]);
    double y1 = Py::Float(pos[3]);


    std::pair<double, double> xy = transform->operator()(x0,y0);
    x0 = xy.first;
    y0 = xy.second;

    xy = transform->operator()(x1,y1);
    x1 = xy.first;
    y1 = xy.second;

    if (usingOffsets) {
      xyo = Py::Tuple(offsets[i%Noffsets]);
      double xo = Py::Float(xyo[0]);
      double yo = Py::Float(xyo[1]);
      std::pair<double, double> xy = transOffset->operator()(xo,yo);
      x0 += xy.first;
      y0 += xy.second;
      x1 += xy.first;
      y1 += xy.second;
      
    }
    //snap x to pixel for vertical lines
    if (x0==x1) {
      x0 = (int)x0 + 0.5;
      x1 = (int)x1 + 0.5;
    }
    
    //snap y to pixel for horizontal lines
    if (y0==y1) {
      y0 = (int)y0 + 0.5;
      y1 = (int)y1 + 0.5;
    }

      
    agg::path_storage path;
    path.move_to(x0, height-y0);
    path.line_to(x1, height-y1);

    agg::conv_stroke<agg::path_storage> stroke(path);
    //stroke.line_cap(cap);
    //stroke.line_join(join);
    double lw = points_to_pixels ( Py::Float( linewidths[i%Nlw] ) );
    
    stroke.width(lw);
    theRasterizer->add_path(stroke);

    // get the color and render
    Py::Tuple rgba = Py::Tuple(colors[ i%Nc]);
    double r = Py::Float(rgba[0]);
    double g = Py::Float(rgba[1]);
    double b = Py::Float(rgba[2]); 
    double a = Py::Float(rgba[3]);
    agg::rgba color(r, g, b, a); 

    // render antialiased or not
    int isaa = Py::Int(antialiaseds[i%Naa]);
    if ( isaa ) {
      theRenderer->color(color);    
      theRasterizer->render(*slineP8, *theRenderer); 
    }
    else {
      rendererBin->color(color);    
      theRasterizer->render(*slineBin, *rendererBin); 
    }
  } //for every segment
  return Py::Object();
}