Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

support for pytorch 1.5.0 #592

Open
wants to merge 11 commits into
base: master
Choose a base branch
from
Prev Previous commit
Next Next commit
Update deform_pool_cuda_kernel.cu
  • Loading branch information
RodrigoGantier authored May 26, 2020
commit ca8fca166cbcd50186a925c4aab3479dad9b2822
26 changes: 13 additions & 13 deletions alphapose/models/layers/dcn/src/deform_pool_cuda_kernel.cu
Original file line number Diff line number Diff line change
Expand Up @@ -290,11 +290,11 @@ void DeformablePSROIPoolForward(const at::Tensor data,

AT_DISPATCH_FLOATING_TYPES_AND_HALF(
data.scalar_type(), "deformable_psroi_pool_forward", ([&] {
const scalar_t *bottom_data = data.data<scalar_t>();
const scalar_t *bottom_rois = bbox.data<scalar_t>();
const scalar_t *bottom_trans = no_trans ? NULL : trans.data<scalar_t>();
scalar_t *top_data = out.data<scalar_t>();
scalar_t *top_count_data = top_count.data<scalar_t>();
const scalar_t *bottom_data = data.data_ptr<scalar_t>();
const scalar_t *bottom_rois = bbox.data_ptr<scalar_t>();
const scalar_t *bottom_trans = no_trans ? NULL : trans.data_ptr<scalar_t>();
scalar_t *top_data = out.data_ptr<scalar_t>();
scalar_t *top_count_data = top_count.data_ptr<scalar_t>();

DeformablePSROIPoolForwardKernel<<<GET_BLOCKS(count), CUDA_NUM_THREADS>>>(
count, bottom_data, (scalar_t)spatial_scale, channels, height, width, pooled_height, pooled_width,
Expand Down Expand Up @@ -341,13 +341,13 @@ void DeformablePSROIPoolBackwardAcc(const at::Tensor out_grad,

AT_DISPATCH_FLOATING_TYPES_AND_HALF(
out_grad.scalar_type(), "deformable_psroi_pool_backward_acc", ([&] {
const scalar_t *top_diff = out_grad.data<scalar_t>();
const scalar_t *bottom_data = data.data<scalar_t>();
const scalar_t *bottom_rois = bbox.data<scalar_t>();
const scalar_t *bottom_trans = no_trans ? NULL : trans.data<scalar_t>();
scalar_t *bottom_data_diff = in_grad.data<scalar_t>();
scalar_t *bottom_trans_diff = no_trans ? NULL : trans_grad.data<scalar_t>();
const scalar_t *top_count_data = top_count.data<scalar_t>();
const scalar_t *top_diff = out_grad.data_ptr<scalar_t>();
const scalar_t *bottom_data = data.data_ptr<scalar_t>();
const scalar_t *bottom_rois = bbox.data_ptr<scalar_t>();
const scalar_t *bottom_trans = no_trans ? NULL : trans.data_ptr<scalar_t>();
scalar_t *bottom_data_diff = in_grad.data_ptr<scalar_t>();
scalar_t *bottom_trans_diff = no_trans ? NULL : trans_grad.data_ptr<scalar_t>();
const scalar_t *top_count_data = top_count.data_ptr<scalar_t>();

DeformablePSROIPoolBackwardAccKernel<<<GET_BLOCKS(count), CUDA_NUM_THREADS>>>(
count, top_diff, top_count_data, num_rois, (scalar_t)spatial_scale, channels, height, width,
Expand All @@ -361,4 +361,4 @@ void DeformablePSROIPoolBackwardAcc(const at::Tensor out_grad,
{
printf("error in DeformablePSROIPoolForward: %s\n", cudaGetErrorString(err));
}
}
}